// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_protos.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;

public final class YarnProtos {
  private YarnProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
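
  // Editorial note: both registerAllExtensions overloads are emitted for every
  // generated outer class; the empty ExtensionRegistryLite body above suggests
  // yarn_protos.proto declares no extensions, making registration a no-op.
  // A minimal, hypothetical call site (harmless even with no extensions):
  //
  //   org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry =
  //       org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry.newInstance();
  //   YarnProtos.registerAllExtensions(registry);
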
  /**
   * Protobuf enum {@code hadoop.yarn.ResourceTypesProto}
   */
  public enum ResourceTypesProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>COUNTABLE = 0;</code>
     */
    COUNTABLE(0),
    ;

    /**
     * <code>COUNTABLE = 0;</code>
     */
    public static final int COUNTABLE_VALUE = 0;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ResourceTypesProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ResourceTypesProto forNumber(int value) {
      switch (value) {
        case 0: return COUNTABLE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ResourceTypesProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ResourceTypesProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ResourceTypesProto>() {
            public ResourceTypesProto findValueByNumber(int number) {
              return ResourceTypesProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(0);
    }

    private static final ResourceTypesProto[] VALUES = values();

    public static ResourceTypesProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ResourceTypesProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ResourceTypesProto)
  }
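
  // Usage sketch (editorial, not generated): the numeric round trip shared by
  // every enum in this file. forNumber(int) returns null for wire values the
  // enum does not define (proto2 semantics), so callers should null-check:
  //
  //   ResourceTypesProto t = ResourceTypesProto.forNumber(0); // COUNTABLE
  //   int wire = t.getNumber();                               // 0
  //   if (ResourceTypesProto.forNumber(42) == null) {
  //     // value from a newer yarn_protos.proto; handle gracefully
  //   }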

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerStateProto}
   */
  public enum ContainerStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>C_NEW = 1;</code>
     */
    C_NEW(1),
    /**
     * <code>C_RUNNING = 2;</code>
     */
    C_RUNNING(2),
    /**
     * <code>C_COMPLETE = 3;</code>
     */
    C_COMPLETE(3),
    ;

    /**
     * <code>C_NEW = 1;</code>
     */
    public static final int C_NEW_VALUE = 1;
    /**
     * <code>C_RUNNING = 2;</code>
     */
    public static final int C_RUNNING_VALUE = 2;
    /**
     * <code>C_COMPLETE = 3;</code>
     */
    public static final int C_COMPLETE_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerStateProto forNumber(int value) {
      switch (value) {
        case 1: return C_NEW;
        case 2: return C_RUNNING;
        case 3: return C_COMPLETE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerStateProto>() {
            public ContainerStateProto findValueByNumber(int number) {
              return ContainerStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(1);
    }

    private static final ContainerStateProto[] VALUES = values();

    public static ContainerStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerStateProto)
  }
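
  // Usage sketch (editorial): the descriptor-based round trip. Every constant
  // maps to an EnumValueDescriptor, and valueOf(EnumValueDescriptor) maps it
  // back, throwing IllegalArgumentException for a descriptor of another type:
  //
  //   ContainerStateProto s = ContainerStateProto.C_RUNNING;
  //   org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor d =
  //       s.getValueDescriptor();
  //   ContainerStateProto back = ContainerStateProto.valueOf(d); // C_RUNNING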

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerSubStateProto}
   */
  public enum ContainerSubStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * NEW, LOCALIZING, SCHEDULED,
     * REINITIALIZING_AWAITING_KILL, RELAUNCHING,
     * </pre>
     *
     * <code>CSS_SCHEDULED = 1;</code>
     */
    CSS_SCHEDULED(1),
    /**
     * <pre>
     * RUNNING, REINITIALIZING, PAUSING, KILLING
     * </pre>
     *
     * <code>CSS_RUNNING = 2;</code>
     */
    CSS_RUNNING(2),
    /**
     * <pre>
     * PAUSED, RESUMING
     * </pre>
     *
     * <code>CSS_PAUSED = 3;</code>
     */
    CSS_PAUSED(3),
    /**
     * <pre>
     * LOCALIZATION_FAILED, EXITED_WITH_SUCCESS,
     * EXITED_WITH_FAILURE,
     * CONTAINER_CLEANEDUP_AFTER_KILL,
     * CONTAINER_RESOURCES_CLEANINGUP
     * </pre>
     *
     * <code>CSS_COMPLETING = 4;</code>
     */
    CSS_COMPLETING(4),
    /**
     * <pre>
     * DONE
     * </pre>
     *
     * <code>CSS_DONE = 5;</code>
     */
    CSS_DONE(5),
    ;

    /**
     * <pre>
     * NEW, LOCALIZING, SCHEDULED,
     * REINITIALIZING_AWAITING_KILL, RELAUNCHING,
     * </pre>
     *
     * <code>CSS_SCHEDULED = 1;</code>
     */
    public static final int CSS_SCHEDULED_VALUE = 1;
    /**
     * <pre>
     * RUNNING, REINITIALIZING, PAUSING, KILLING
     * </pre>
     *
     * <code>CSS_RUNNING = 2;</code>
     */
    public static final int CSS_RUNNING_VALUE = 2;
    /**
     * <pre>
     * PAUSED, RESUMING
     * </pre>
     *
     * <code>CSS_PAUSED = 3;</code>
     */
    public static final int CSS_PAUSED_VALUE = 3;
    /**
     * <pre>
     * LOCALIZATION_FAILED, EXITED_WITH_SUCCESS,
     * EXITED_WITH_FAILURE,
     * CONTAINER_CLEANEDUP_AFTER_KILL,
     * CONTAINER_RESOURCES_CLEANINGUP
     * </pre>
     *
     * <code>CSS_COMPLETING = 4;</code>
     */
    public static final int CSS_COMPLETING_VALUE = 4;
    /**
     * <pre>
     * DONE
     * </pre>
     *
     * <code>CSS_DONE = 5;</code>
     */
    public static final int CSS_DONE_VALUE = 5;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerSubStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerSubStateProto forNumber(int value) {
      switch (value) {
        case 1: return CSS_SCHEDULED;
        case 2: return CSS_RUNNING;
        case 3: return CSS_PAUSED;
        case 4: return CSS_COMPLETING;
        case 5: return CSS_DONE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerSubStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerSubStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerSubStateProto>() {
            public ContainerSubStateProto findValueByNumber(int number) {
              return ContainerSubStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(2);
    }

    private static final ContainerSubStateProto[] VALUES = values();

    public static ContainerSubStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerSubStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerSubStateProto)
  }
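
  // Editorial sketch: the value comments above group the NodeManager container
  // states summarized by each substate. One hypothetical way to coarsen them,
  // based solely on those comments (isActive is not part of the generated API):
  //
  //   static boolean isActive(ContainerSubStateProto s) {
  //     switch (s) {
  //       case CSS_SCHEDULED:
  //       case CSS_RUNNING:
  //       case CSS_PAUSED:
  //         return true;
  //       default: // CSS_COMPLETING, CSS_DONE
  //         return false;
  //     }
  //   }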

  /**
   * Protobuf enum {@code hadoop.yarn.YarnApplicationStateProto}
   */
  public enum YarnApplicationStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>NEW = 1;</code>
     */
    NEW(1),
    /**
     * <code>NEW_SAVING = 2;</code>
     */
    NEW_SAVING(2),
    /**
     * <code>SUBMITTED = 3;</code>
     */
    SUBMITTED(3),
    /**
     * <code>ACCEPTED = 4;</code>
     */
    ACCEPTED(4),
    /**
     * <code>RUNNING = 5;</code>
     */
    RUNNING(5),
    /**
     * <code>FINISHED = 6;</code>
     */
    FINISHED(6),
    /**
     * <code>FAILED = 7;</code>
     */
    FAILED(7),
    /**
     * <code>KILLED = 8;</code>
     */
    KILLED(8),
    ;

    /**
     * <code>NEW = 1;</code>
     */
    public static final int NEW_VALUE = 1;
    /**
     * <code>NEW_SAVING = 2;</code>
     */
    public static final int NEW_SAVING_VALUE = 2;
    /**
     * <code>SUBMITTED = 3;</code>
     */
    public static final int SUBMITTED_VALUE = 3;
    /**
     * <code>ACCEPTED = 4;</code>
     */
    public static final int ACCEPTED_VALUE = 4;
    /**
     * <code>RUNNING = 5;</code>
     */
    public static final int RUNNING_VALUE = 5;
    /**
     * <code>FINISHED = 6;</code>
     */
    public static final int FINISHED_VALUE = 6;
    /**
     * <code>FAILED = 7;</code>
     */
    public static final int FAILED_VALUE = 7;
    /**
     * <code>KILLED = 8;</code>
     */
    public static final int KILLED_VALUE = 8;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static YarnApplicationStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static YarnApplicationStateProto forNumber(int value) {
      switch (value) {
        case 1: return NEW;
        case 2: return NEW_SAVING;
        case 3: return SUBMITTED;
        case 4: return ACCEPTED;
        case 5: return RUNNING;
        case 6: return FINISHED;
        case 7: return FAILED;
        case 8: return KILLED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        YarnApplicationStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationStateProto>() {
            public YarnApplicationStateProto findValueByNumber(int number) {
              return YarnApplicationStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(3);
    }

    private static final YarnApplicationStateProto[] VALUES = values();

    public static YarnApplicationStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private YarnApplicationStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.YarnApplicationStateProto)
  }
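
  // Editorial sketch: in YARN's application lifecycle, FINISHED, FAILED and
  // KILLED are the terminal states. A hypothetical helper (not generated):
  //
  //   static boolean isTerminal(YarnApplicationStateProto s) {
  //     return s == YarnApplicationStateProto.FINISHED
  //         || s == YarnApplicationStateProto.FAILED
  //         || s == YarnApplicationStateProto.KILLED;
  //   }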

  /**
   * Protobuf enum {@code hadoop.yarn.YarnApplicationAttemptStateProto}
   */
  public enum YarnApplicationAttemptStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>APP_ATTEMPT_NEW = 1;</code>
     */
    APP_ATTEMPT_NEW(1),
    /**
     * <code>APP_ATTEMPT_SUBMITTED = 2;</code>
     */
    APP_ATTEMPT_SUBMITTED(2),
    /**
     * <code>APP_ATTEMPT_SCHEDULED = 3;</code>
     */
    APP_ATTEMPT_SCHEDULED(3),
    /**
     * <code>APP_ATTEMPT_ALLOCATED_SAVING = 4;</code>
     */
    APP_ATTEMPT_ALLOCATED_SAVING(4),
    /**
     * <code>APP_ATTEMPT_ALLOCATED = 5;</code>
     */
    APP_ATTEMPT_ALLOCATED(5),
    /**
     * <code>APP_ATTEMPT_LAUNCHED = 6;</code>
     */
    APP_ATTEMPT_LAUNCHED(6),
    /**
     * <code>APP_ATTEMPT_FAILED = 7;</code>
     */
    APP_ATTEMPT_FAILED(7),
    /**
     * <code>APP_ATTEMPT_RUNNING = 8;</code>
     */
    APP_ATTEMPT_RUNNING(8),
    /**
     * <code>APP_ATTEMPT_FINISHING = 9;</code>
     */
    APP_ATTEMPT_FINISHING(9),
    /**
     * <code>APP_ATTEMPT_FINISHED = 10;</code>
     */
    APP_ATTEMPT_FINISHED(10),
    /**
     * <code>APP_ATTEMPT_KILLED = 11;</code>
     */
    APP_ATTEMPT_KILLED(11),
    ;

    /**
     * <code>APP_ATTEMPT_NEW = 1;</code>
     */
    public static final int APP_ATTEMPT_NEW_VALUE = 1;
    /**
     * <code>APP_ATTEMPT_SUBMITTED = 2;</code>
     */
    public static final int APP_ATTEMPT_SUBMITTED_VALUE = 2;
    /**
     * <code>APP_ATTEMPT_SCHEDULED = 3;</code>
     */
    public static final int APP_ATTEMPT_SCHEDULED_VALUE = 3;
    /**
     * <code>APP_ATTEMPT_ALLOCATED_SAVING = 4;</code>
     */
    public static final int APP_ATTEMPT_ALLOCATED_SAVING_VALUE = 4;
    /**
     * <code>APP_ATTEMPT_ALLOCATED = 5;</code>
     */
    public static final int APP_ATTEMPT_ALLOCATED_VALUE = 5;
    /**
     * <code>APP_ATTEMPT_LAUNCHED = 6;</code>
     */
    public static final int APP_ATTEMPT_LAUNCHED_VALUE = 6;
    /**
     * <code>APP_ATTEMPT_FAILED = 7;</code>
     */
    public static final int APP_ATTEMPT_FAILED_VALUE = 7;
    /**
     * <code>APP_ATTEMPT_RUNNING = 8;</code>
     */
    public static final int APP_ATTEMPT_RUNNING_VALUE = 8;
    /**
     * <code>APP_ATTEMPT_FINISHING = 9;</code>
     */
    public static final int APP_ATTEMPT_FINISHING_VALUE = 9;
    /**
     * <code>APP_ATTEMPT_FINISHED = 10;</code>
     */
    public static final int APP_ATTEMPT_FINISHED_VALUE = 10;
    /**
     * <code>APP_ATTEMPT_KILLED = 11;</code>
     */
    public static final int APP_ATTEMPT_KILLED_VALUE = 11;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static YarnApplicationAttemptStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static YarnApplicationAttemptStateProto forNumber(int value) {
      switch (value) {
        case 1: return APP_ATTEMPT_NEW;
        case 2: return APP_ATTEMPT_SUBMITTED;
        case 3: return APP_ATTEMPT_SCHEDULED;
        case 4: return APP_ATTEMPT_ALLOCATED_SAVING;
        case 5: return APP_ATTEMPT_ALLOCATED;
        case 6: return APP_ATTEMPT_LAUNCHED;
        case 7: return APP_ATTEMPT_FAILED;
        case 8: return APP_ATTEMPT_RUNNING;
        case 9: return APP_ATTEMPT_FINISHING;
        case 10: return APP_ATTEMPT_FINISHED;
        case 11: return APP_ATTEMPT_KILLED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationAttemptStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        YarnApplicationAttemptStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<YarnApplicationAttemptStateProto>() {
            public YarnApplicationAttemptStateProto findValueByNumber(int number) {
              return YarnApplicationAttemptStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(4);
    }

    private static final YarnApplicationAttemptStateProto[] VALUES = values();

    public static YarnApplicationAttemptStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private YarnApplicationAttemptStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.YarnApplicationAttemptStateProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.FinalApplicationStatusProto}
   */
  public enum FinalApplicationStatusProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>APP_UNDEFINED = 0;</code>
     */
    APP_UNDEFINED(0),
    /**
     * <code>APP_SUCCEEDED = 1;</code>
     */
    APP_SUCCEEDED(1),
    /**
     * <code>APP_FAILED = 2;</code>
     */
    APP_FAILED(2),
    /**
     * <code>APP_KILLED = 3;</code>
     */
    APP_KILLED(3),
    /**
     * <code>APP_ENDED = 4;</code>
     */
    APP_ENDED(4),
    ;

    /**
     * <code>APP_UNDEFINED = 0;</code>
     */
    public static final int APP_UNDEFINED_VALUE = 0;
    /**
     * <code>APP_SUCCEEDED = 1;</code>
     */
    public static final int APP_SUCCEEDED_VALUE = 1;
    /**
     * <code>APP_FAILED = 2;</code>
     */
    public static final int APP_FAILED_VALUE = 2;
    /**
     * <code>APP_KILLED = 3;</code>
     */
    public static final int APP_KILLED_VALUE = 3;
    /**
     * <code>APP_ENDED = 4;</code>
     */
    public static final int APP_ENDED_VALUE = 4;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static FinalApplicationStatusProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static FinalApplicationStatusProto forNumber(int value) {
      switch (value) {
        case 0: return APP_UNDEFINED;
        case 1: return APP_SUCCEEDED;
        case 2: return APP_FAILED;
        case 3: return APP_KILLED;
        case 4: return APP_ENDED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<FinalApplicationStatusProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        FinalApplicationStatusProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<FinalApplicationStatusProto>() {
            public FinalApplicationStatusProto findValueByNumber(int number) {
              return FinalApplicationStatusProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(5);
    }

    private static final FinalApplicationStatusProto[] VALUES = values();

    public static FinalApplicationStatusProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private FinalApplicationStatusProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.FinalApplicationStatusProto)
  }
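
  // Editorial note: the deprecated valueOf(int) simply delegates to
  // forNumber(int). Do not confuse it with java.lang.Enum's valueOf(String),
  // which looks constants up by name rather than by wire number:
  //
  //   FinalApplicationStatusProto a = FinalApplicationStatusProto.forNumber(2);  // APP_FAILED
  //   FinalApplicationStatusProto b = FinalApplicationStatusProto.valueOf(2);    // same, deprecated
  //   FinalApplicationStatusProto c = FinalApplicationStatusProto.valueOf("APP_FAILED"); // by name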

  /**
   * Protobuf enum {@code hadoop.yarn.LocalResourceVisibilityProto}
   */
  public enum LocalResourceVisibilityProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>PUBLIC = 1;</code>
     */
    PUBLIC(1),
    /**
     * <code>PRIVATE = 2;</code>
     */
    PRIVATE(2),
    /**
     * <code>APPLICATION = 3;</code>
     */
    APPLICATION(3),
    ;

    /**
     * <code>PUBLIC = 1;</code>
     */
    public static final int PUBLIC_VALUE = 1;
    /**
     * <code>PRIVATE = 2;</code>
     */
    public static final int PRIVATE_VALUE = 2;
    /**
     * <code>APPLICATION = 3;</code>
     */
    public static final int APPLICATION_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LocalResourceVisibilityProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static LocalResourceVisibilityProto forNumber(int value) {
      switch (value) {
        case 1: return PUBLIC;
        case 2: return PRIVATE;
        case 3: return APPLICATION;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceVisibilityProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        LocalResourceVisibilityProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceVisibilityProto>() {
            public LocalResourceVisibilityProto findValueByNumber(int number) {
              return LocalResourceVisibilityProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(6);
    }

    private static final LocalResourceVisibilityProto[] VALUES = values();

    public static LocalResourceVisibilityProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LocalResourceVisibilityProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalResourceVisibilityProto)
  }
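
  // Editorial note: proto2 enums need not define a zero value, and most enums
  // in this file start numbering at 1. Passing 0 where no zero value exists
  // yields null, exactly like any other unknown wire value:
  //
  //   LocalResourceVisibilityProto v = LocalResourceVisibilityProto.forNumber(0); // null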

  /**
   * Protobuf enum {@code hadoop.yarn.LocalResourceTypeProto}
   */
  public enum LocalResourceTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>ARCHIVE = 1;</code>
     */
    ARCHIVE(1),
    /**
     * <code>FILE = 2;</code>
     */
    FILE(2),
    /**
     * <code>PATTERN = 3;</code>
     */
    PATTERN(3),
    ;

    /**
     * <code>ARCHIVE = 1;</code>
     */
    public static final int ARCHIVE_VALUE = 1;
    /**
     * <code>FILE = 2;</code>
     */
    public static final int FILE_VALUE = 2;
    /**
     * <code>PATTERN = 3;</code>
     */
    public static final int PATTERN_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LocalResourceTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static LocalResourceTypeProto forNumber(int value) {
      switch (value) {
        case 1: return ARCHIVE;
        case 2: return FILE;
        case 3: return PATTERN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        LocalResourceTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LocalResourceTypeProto>() {
            public LocalResourceTypeProto findValueByNumber(int number) {
              return LocalResourceTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(7);
    }

    private static final LocalResourceTypeProto[] VALUES = values();

    public static LocalResourceTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LocalResourceTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LocalResourceTypeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.LogAggregationStatusProto}
   */
  public enum LogAggregationStatusProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>LOG_DISABLED = 1;</code>
     */
    LOG_DISABLED(1),
    /**
     * <code>LOG_NOT_START = 2;</code>
     */
    LOG_NOT_START(2),
    /**
     * <code>LOG_RUNNING = 3;</code>
     */
    LOG_RUNNING(3),
    /**
     * <code>LOG_SUCCEEDED = 4;</code>
     */
    LOG_SUCCEEDED(4),
    /**
     * <code>LOG_FAILED = 5;</code>
     */
    LOG_FAILED(5),
    /**
     * <code>LOG_TIME_OUT = 6;</code>
     */
    LOG_TIME_OUT(6),
    /**
     * <code>LOG_RUNNING_WITH_FAILURE = 7;</code>
     */
    LOG_RUNNING_WITH_FAILURE(7),
    ;

    /**
     * <code>LOG_DISABLED = 1;</code>
     */
    public static final int LOG_DISABLED_VALUE = 1;
    /**
     * <code>LOG_NOT_START = 2;</code>
     */
    public static final int LOG_NOT_START_VALUE = 2;
    /**
     * <code>LOG_RUNNING = 3;</code>
     */
    public static final int LOG_RUNNING_VALUE = 3;
    /**
     * <code>LOG_SUCCEEDED = 4;</code>
     */
    public static final int LOG_SUCCEEDED_VALUE = 4;
    /**
     * <code>LOG_FAILED = 5;</code>
     */
    public static final int LOG_FAILED_VALUE = 5;
    /**
     * <code>LOG_TIME_OUT = 6;</code>
     */
    public static final int LOG_TIME_OUT_VALUE = 6;
    /**
     * <code>LOG_RUNNING_WITH_FAILURE = 7;</code>
     */
    public static final int LOG_RUNNING_WITH_FAILURE_VALUE = 7;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static LogAggregationStatusProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static LogAggregationStatusProto forNumber(int value) {
      switch (value) {
        case 1: return LOG_DISABLED;
        case 2: return LOG_NOT_START;
        case 3: return LOG_RUNNING;
        case 4: return LOG_SUCCEEDED;
        case 5: return LOG_FAILED;
        case 6: return LOG_TIME_OUT;
        case 7: return LOG_RUNNING_WITH_FAILURE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LogAggregationStatusProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        LogAggregationStatusProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<LogAggregationStatusProto>() {
            public LogAggregationStatusProto findValueByNumber(int number) {
              return LogAggregationStatusProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(8);
    }

    private static final LogAggregationStatusProto[] VALUES = values();

    public static LogAggregationStatusProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private LogAggregationStatusProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.LogAggregationStatusProto)
  }
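
  // Usage sketch (editorial): the *_VALUE int constants are compile-time
  // constants, so raw wire integers can be switched on directly without first
  // resolving the enum constant:
  //
  //   static String describe(int wire) {
  //     switch (wire) {
  //       case LogAggregationStatusProto.LOG_RUNNING_VALUE:   return "running";
  //       case LogAggregationStatusProto.LOG_SUCCEEDED_VALUE: return "succeeded";
  //       case LogAggregationStatusProto.LOG_FAILED_VALUE:    return "failed";
  //       default:                                            return "other";
  //     }
  //   }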

  /**
   * Protobuf enum {@code hadoop.yarn.NodeStateProto}
   */
  public enum NodeStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>NS_NEW = 1;</code>
     */
    NS_NEW(1),
    /**
     * <code>NS_RUNNING = 2;</code>
     */
    NS_RUNNING(2),
    /**
     * <code>NS_UNHEALTHY = 3;</code>
     */
    NS_UNHEALTHY(3),
    /**
     * <code>NS_DECOMMISSIONED = 4;</code>
     */
    NS_DECOMMISSIONED(4),
    /**
     * <code>NS_LOST = 5;</code>
     */
    NS_LOST(5),
    /**
     * <code>NS_REBOOTED = 6;</code>
     */
    NS_REBOOTED(6),
    /**
     * <code>NS_DECOMMISSIONING = 7;</code>
     */
    NS_DECOMMISSIONING(7),
    /**
     * <code>NS_SHUTDOWN = 8;</code>
     */
    NS_SHUTDOWN(8),
    ;

    /**
     * <code>NS_NEW = 1;</code>
     */
    public static final int NS_NEW_VALUE = 1;
    /**
     * <code>NS_RUNNING = 2;</code>
     */
    public static final int NS_RUNNING_VALUE = 2;
    /**
     * <code>NS_UNHEALTHY = 3;</code>
     */
    public static final int NS_UNHEALTHY_VALUE = 3;
    /**
     * <code>NS_DECOMMISSIONED = 4;</code>
     */
    public static final int NS_DECOMMISSIONED_VALUE = 4;
    /**
     * <code>NS_LOST = 5;</code>
     */
    public static final int NS_LOST_VALUE = 5;
    /**
     * <code>NS_REBOOTED = 6;</code>
     */
    public static final int NS_REBOOTED_VALUE = 6;
    /**
     * <code>NS_DECOMMISSIONING = 7;</code>
     */
    public static final int NS_DECOMMISSIONING_VALUE = 7;
    /**
     * <code>NS_SHUTDOWN = 8;</code>
     */
    public static final int NS_SHUTDOWN_VALUE = 8;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NodeStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static NodeStateProto forNumber(int value) {
      switch (value) {
        case 1: return NS_NEW;
        case 2: return NS_RUNNING;
        case 3: return NS_UNHEALTHY;
        case 4: return NS_DECOMMISSIONED;
        case 5: return NS_LOST;
        case 6: return NS_REBOOTED;
        case 7: return NS_DECOMMISSIONING;
        case 8: return NS_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        NodeStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeStateProto>() {
            public NodeStateProto findValueByNumber(int number) {
              return NodeStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(9);
    }

    private static final NodeStateProto[] VALUES = values();

    public static NodeStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeStateProto)
  }
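
  // Editorial note: internalGetValueMap() is plumbing for the protobuf
  // runtime; application code should prefer forNumber(int). For reference,
  // the map performs the same lookup:
  //
  //   NodeStateProto s =
  //       NodeStateProto.internalGetValueMap().findValueByNumber(2);
  //   // s == NodeStateProto.NS_RUNNING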

  /**
   * Protobuf enum {@code hadoop.yarn.NodeUpdateTypeProto}
   */
  public enum NodeUpdateTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>NODE_USABLE = 0;</code>
     */
    NODE_USABLE(0),
    /**
     * <code>NODE_UNUSABLE = 1;</code>
     */
    NODE_UNUSABLE(1),
    /**
     * <code>NODE_DECOMMISSIONING = 2;</code>
     */
    NODE_DECOMMISSIONING(2),
    ;

    /**
     * <code>NODE_USABLE = 0;</code>
     */
    public static final int NODE_USABLE_VALUE = 0;
    /**
     * <code>NODE_UNUSABLE = 1;</code>
     */
    public static final int NODE_UNUSABLE_VALUE = 1;
    /**
     * <code>NODE_DECOMMISSIONING = 2;</code>
     */
    public static final int NODE_DECOMMISSIONING_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NodeUpdateTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static NodeUpdateTypeProto forNumber(int value) {
      switch (value) {
        case 0: return NODE_USABLE;
        case 1: return NODE_UNUSABLE;
        case 2: return NODE_DECOMMISSIONING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeUpdateTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        NodeUpdateTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeUpdateTypeProto>() {
            public NodeUpdateTypeProto findValueByNumber(int number) {
              return NodeUpdateTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(10);
    }

    private static final NodeUpdateTypeProto[] VALUES = values();

    public static NodeUpdateTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeUpdateTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeUpdateTypeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.NodeAttributeTypeProto}
   */
  public enum NodeAttributeTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>STRING = 1;</code>
     */
    STRING(1),
    ;

    /**
     * <code>STRING = 1;</code>
     */
    public static final int STRING_VALUE = 1;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NodeAttributeTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static NodeAttributeTypeProto forNumber(int value) {
      switch (value) {
        case 1: return STRING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        NodeAttributeTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeTypeProto>() {
            public NodeAttributeTypeProto findValueByNumber(int number) {
              return NodeAttributeTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(11);
    }

    private static final NodeAttributeTypeProto[] VALUES = values();

    public static NodeAttributeTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeAttributeTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeAttributeTypeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerTypeProto}
   */
  public enum ContainerTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>APPLICATION_MASTER = 1;</code>
     */
    APPLICATION_MASTER(1),
    /**
     * <code>TASK = 2;</code>
     */
    TASK(2),
    ;

    /**
     * <code>APPLICATION_MASTER = 1;</code>
     */
    public static final int APPLICATION_MASTER_VALUE = 1;
    /**
     * <code>TASK = 2;</code>
     */
    public static final int TASK_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APPLICATION_MASTER;
        case 2: return TASK;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerTypeProto>() {
            public ContainerTypeProto findValueByNumber(int number) {
              return ContainerTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(12);
    }

    private static final ContainerTypeProto[] VALUES = values();

    public static ContainerTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerTypeProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.ExecutionTypeProto}
   */
  public enum ExecutionTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>GUARANTEED = 1;</code>
     */
    GUARANTEED(1),
    /**
     * <code>OPPORTUNISTIC = 2;</code>
     */
    OPPORTUNISTIC(2),
    ;

    /**
     * <code>GUARANTEED = 1;</code>
     */
    public static final int GUARANTEED_VALUE = 1;
    /**
     * <code>OPPORTUNISTIC = 2;</code>
     */
    public static final int OPPORTUNISTIC_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ExecutionTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ExecutionTypeProto forNumber(int value) {
      switch (value) {
        case 1: return GUARANTEED;
        case 2: return OPPORTUNISTIC;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ExecutionTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ExecutionTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ExecutionTypeProto>() {
            public ExecutionTypeProto findValueByNumber(int number) {
              return ExecutionTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(13);
    }

    private static final ExecutionTypeProto[] VALUES = values();

    public static ExecutionTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ExecutionTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ExecutionTypeProto)
  }
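
  // Editorial note: getDescriptor() fetches this enum's EnumDescriptor by its
  // index in the file descriptor (13 here), which follows declaration order in
  // yarn_protos.proto; getDescriptorForType() returns the same object:
  //
  //   org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor ed =
  //       ExecutionTypeProto.getDescriptor();
  //   boolean same = ed == ExecutionTypeProto.GUARANTEED.getDescriptorForType(); // true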

  /**
   * Protobuf enum {@code hadoop.yarn.AMCommandProto}
   */
  public enum AMCommandProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>AM_RESYNC = 1;</code>
     */
    AM_RESYNC(1),
    /**
     * <code>AM_SHUTDOWN = 2;</code>
     */
    AM_SHUTDOWN(2),
    ;

    /**
     * <code>AM_RESYNC = 1;</code>
     */
    public static final int AM_RESYNC_VALUE = 1;
    /**
     * <code>AM_SHUTDOWN = 2;</code>
     */
    public static final int AM_SHUTDOWN_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static AMCommandProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static AMCommandProto forNumber(int value) {
      switch (value) {
        case 1: return AM_RESYNC;
        case 2: return AM_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AMCommandProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        AMCommandProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AMCommandProto>() {
            public AMCommandProto findValueByNumber(int number) {
              return AMCommandProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(14);
    }

    private static final AMCommandProto[] VALUES = values();

    public static AMCommandProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private AMCommandProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.AMCommandProto)
  }

  /**
   * Protobuf enum {@code hadoop.yarn.RejectionReasonProto}
   */
  public enum RejectionReasonProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>RRP_COULD_NOT_PLACE_ON_NODE = 1;</code>
     */
    RRP_COULD_NOT_PLACE_ON_NODE(1),
    /**
     * <code>RRP_COULD_NOT_SCHEDULE_ON_NODE = 2;</code>
     */
    RRP_COULD_NOT_SCHEDULE_ON_NODE(2),
    ;

    /**
     * <code>RRP_COULD_NOT_PLACE_ON_NODE = 1;</code>
     */
    public static final int RRP_COULD_NOT_PLACE_ON_NODE_VALUE = 1;
    /**
     * <code>RRP_COULD_NOT_SCHEDULE_ON_NODE = 2;</code>
     */
    public static final int RRP_COULD_NOT_SCHEDULE_ON_NODE_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static RejectionReasonProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static RejectionReasonProto forNumber(int value) {
      switch (value) {
        case 1: return RRP_COULD_NOT_PLACE_ON_NODE;
        case 2: return RRP_COULD_NOT_SCHEDULE_ON_NODE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RejectionReasonProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        RejectionReasonProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<RejectionReasonProto>() {
            public RejectionReasonProto findValueByNumber(int number) {
              return RejectionReasonProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(15);
    }

    private static final RejectionReasonProto[] VALUES = values();

    public static RejectionReasonProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private RejectionReasonProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.RejectionReasonProto)
  }
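
  // Sketch (helper not part of the generated file): forNumber returns null for
  // wire values with no matching constant, so callers resolve defensively; the
  // deprecated int-based valueOf simply delegates to forNumber.
  private static RejectionReasonProto rejectionReasonOrPlacementFailure(int wireValue) {
    RejectionReasonProto reason = RejectionReasonProto.forNumber(wireValue);
    // Fallback constant chosen arbitrarily for illustration.
    return reason != null ? reason : RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE;
  }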

  /**
   * Protobuf enum {@code hadoop.yarn.ApplicationTimeoutTypeProto}
   */
  public enum ApplicationTimeoutTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>APP_TIMEOUT_LIFETIME = 1;</code>
     */
    APP_TIMEOUT_LIFETIME(1),
    ;

    /**
     * <code>APP_TIMEOUT_LIFETIME = 1;</code>
     */
    public static final int APP_TIMEOUT_LIFETIME_VALUE = 1;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ApplicationTimeoutTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ApplicationTimeoutTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APP_TIMEOUT_LIFETIME;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationTimeoutTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ApplicationTimeoutTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationTimeoutTypeProto>() {
            public ApplicationTimeoutTypeProto findValueByNumber(int number) {
              return ApplicationTimeoutTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(16);
    }

    private static final ApplicationTimeoutTypeProto[] VALUES = values();

    public static ApplicationTimeoutTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ApplicationTimeoutTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationTimeoutTypeProto)
  }
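
  // Sketch (helper is illustrative): internalGetValueMap() exposes the
  // EnumLiteMap the runtime uses for number-to-enum resolution; application
  // code would normally call forNumber directly, but the two are equivalent.
  private static ApplicationTimeoutTypeProto timeoutTypeViaLiteMap(int number) {
    return ApplicationTimeoutTypeProto.internalGetValueMap().findValueByNumber(number);
  }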

  /**
   * Protobuf enum {@code hadoop.yarn.ApplicationAccessTypeProto}
   */
  public enum ApplicationAccessTypeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>APPACCESS_VIEW_APP = 1;</code>
     */
    APPACCESS_VIEW_APP(1),
    /**
     * <code>APPACCESS_MODIFY_APP = 2;</code>
     */
    APPACCESS_MODIFY_APP(2),
    ;

    /**
     * <code>APPACCESS_VIEW_APP = 1;</code>
     */
    public static final int APPACCESS_VIEW_APP_VALUE = 1;
    /**
     * <code>APPACCESS_MODIFY_APP = 2;</code>
     */
    public static final int APPACCESS_MODIFY_APP_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ApplicationAccessTypeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ApplicationAccessTypeProto forNumber(int value) {
      switch (value) {
        case 1: return APPACCESS_VIEW_APP;
        case 2: return APPACCESS_MODIFY_APP;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationAccessTypeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ApplicationAccessTypeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ApplicationAccessTypeProto>() {
            public ApplicationAccessTypeProto findValueByNumber(int number) {
              return ApplicationAccessTypeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(17);
    }

    private static final ApplicationAccessTypeProto[] VALUES = values();

    public static ApplicationAccessTypeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ApplicationAccessTypeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ApplicationAccessTypeProto)
  }
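
  // Sketch (helper is illustrative): the inherited String-based Enum.valueOf
  // and the generated numeric forNumber are distinct lookups; the former throws
  // IllegalArgumentException on unknown names while the latter returns null.
  private static ApplicationAccessTypeProto accessTypeFromName(java.lang.String name) {
    return ApplicationAccessTypeProto.valueOf(name);  // e.g. "APPACCESS_VIEW_APP"
  }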

  /**
   * Protobuf enum {@code hadoop.yarn.QueueStateProto}
   */
  public enum QueueStateProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>Q_STOPPED = 1;</code>
     */
    Q_STOPPED(1),
    /**
     * <code>Q_RUNNING = 2;</code>
     */
    Q_RUNNING(2),
    /**
     * <code>Q_DRAINING = 3;</code>
     */
    Q_DRAINING(3),
    ;

    /**
     * <code>Q_STOPPED = 1;</code>
     */
    public static final int Q_STOPPED_VALUE = 1;
    /**
     * <code>Q_RUNNING = 2;</code>
     */
    public static final int Q_RUNNING_VALUE = 2;
    /**
     * <code>Q_DRAINING = 3;</code>
     */
    public static final int Q_DRAINING_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static QueueStateProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static QueueStateProto forNumber(int value) {
      switch (value) {
        case 1: return Q_STOPPED;
        case 2: return Q_RUNNING;
        case 3: return Q_DRAINING;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueStateProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        QueueStateProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueStateProto>() {
            public QueueStateProto findValueByNumber(int number) {
              return QueueStateProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(18);
    }

    private static final QueueStateProto[] VALUES = values();

    public static QueueStateProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private QueueStateProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.QueueStateProto)
  }
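
  // Sketch (assumed semantics, for illustration only; the wire enum itself
  // carries no behavior): treat only running queues as accepting submissions,
  // comparing the numeric wire value via getNumber() and a *_VALUE constant.
  private static boolean queueAcceptsSubmissions(QueueStateProto state) {
    return state.getNumber() == QueueStateProto.Q_RUNNING_VALUE;
  }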

  /**
   * Protobuf enum {@code hadoop.yarn.QueueACLProto}
   */
  public enum QueueACLProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>QACL_SUBMIT_APPLICATIONS = 1;</code>
     */
    QACL_SUBMIT_APPLICATIONS(1),
    /**
     * <code>QACL_ADMINISTER_QUEUE = 2;</code>
     */
    QACL_ADMINISTER_QUEUE(2),
    ;

    /**
     * <code>QACL_SUBMIT_APPLICATIONS = 1;</code>
     */
    public static final int QACL_SUBMIT_APPLICATIONS_VALUE = 1;
    /**
     * <code>QACL_ADMINISTER_QUEUE = 2;</code>
     */
    public static final int QACL_ADMINISTER_QUEUE_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static QueueACLProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static QueueACLProto forNumber(int value) {
      switch (value) {
        case 1: return QACL_SUBMIT_APPLICATIONS;
        case 2: return QACL_ADMINISTER_QUEUE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueACLProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        QueueACLProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<QueueACLProto>() {
            public QueueACLProto findValueByNumber(int number) {
              return QueueACLProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(19);
    }

    private static final QueueACLProto[] VALUES = values();

    public static QueueACLProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private QueueACLProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.QueueACLProto)
  }
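
  // Sketch (helper and labels are illustrative): the *_VALUE fields are
  // compile-time int constants, so unlike getNumber() results they can label
  // cases in a switch over a raw wire value.
  private static java.lang.String describeQueueAcl(int wireValue) {
    switch (wireValue) {
      case QueueACLProto.QACL_SUBMIT_APPLICATIONS_VALUE: return "submit applications";
      case QueueACLProto.QACL_ADMINISTER_QUEUE_VALUE:    return "administer queue";
      default:                                           return "unknown (" + wireValue + ")";
    }
  }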

  /**
   * Protobuf enum {@code hadoop.yarn.SignalContainerCommandProto}
   */
  public enum SignalContainerCommandProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>OUTPUT_THREAD_DUMP = 1;</code>
     */
    OUTPUT_THREAD_DUMP(1),
    /**
     * <code>GRACEFUL_SHUTDOWN = 2;</code>
     */
    GRACEFUL_SHUTDOWN(2),
    /**
     * <code>FORCEFUL_SHUTDOWN = 3;</code>
     */
    FORCEFUL_SHUTDOWN(3),
    ;

    /**
     * <code>OUTPUT_THREAD_DUMP = 1;</code>
     */
    public static final int OUTPUT_THREAD_DUMP_VALUE = 1;
    /**
     * <code>GRACEFUL_SHUTDOWN = 2;</code>
     */
    public static final int GRACEFUL_SHUTDOWN_VALUE = 2;
    /**
     * <code>FORCEFUL_SHUTDOWN = 3;</code>
     */
    public static final int FORCEFUL_SHUTDOWN_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static SignalContainerCommandProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static SignalContainerCommandProto forNumber(int value) {
      switch (value) {
        case 1: return OUTPUT_THREAD_DUMP;
        case 2: return GRACEFUL_SHUTDOWN;
        case 3: return FORCEFUL_SHUTDOWN;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SignalContainerCommandProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        SignalContainerCommandProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<SignalContainerCommandProto>() {
            public SignalContainerCommandProto findValueByNumber(int number) {
              return SignalContainerCommandProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(20);
    }

    private static final SignalContainerCommandProto[] VALUES = values();

    public static SignalContainerCommandProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private SignalContainerCommandProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.SignalContainerCommandProto)
  }
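
  // Sketch (assumed mapping, for illustration only; the proto itself defines
  // no signal numbers): translate a command to a conventional POSIX signal.
  private static int toPosixSignal(SignalContainerCommandProto command) {
    switch (command) {
      case OUTPUT_THREAD_DUMP: return 3;   // SIGQUIT
      case GRACEFUL_SHUTDOWN:  return 15;  // SIGTERM
      case FORCEFUL_SHUTDOWN:  return 9;   // SIGKILL
      default: throw new java.lang.IllegalArgumentException("unexpected: " + command);
    }
  }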

  /**
   * Protobuf enum {@code hadoop.yarn.NodeAttributeOpCodeProto}
   */
  public enum NodeAttributeOpCodeProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>NO_OP = 1;</code>
     */
    NO_OP(1),
    /**
     * <code>EQ = 2;</code>
     */
    EQ(2),
    /**
     * <code>NE = 3;</code>
     */
    NE(3),
    ;

    /**
     * <code>NO_OP = 1;</code>
     */
    public static final int NO_OP_VALUE = 1;
    /**
     * <code>EQ = 2;</code>
     */
    public static final int EQ_VALUE = 2;
    /**
     * <code>NE = 3;</code>
     */
    public static final int NE_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static NodeAttributeOpCodeProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static NodeAttributeOpCodeProto forNumber(int value) {
      switch (value) {
        case 1: return NO_OP;
        case 2: return EQ;
        case 3: return NE;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeOpCodeProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        NodeAttributeOpCodeProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<NodeAttributeOpCodeProto>() {
            public NodeAttributeOpCodeProto findValueByNumber(int number) {
              return NodeAttributeOpCodeProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(21);
    }

    private static final NodeAttributeOpCodeProto[] VALUES = values();

    public static NodeAttributeOpCodeProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private NodeAttributeOpCodeProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.NodeAttributeOpCodeProto)
  }
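
  // Sketch (assumed semantics; NO_OP is treated here as "no constraint"):
  // evaluate an attribute op code against an actual and an expected value.
  private static boolean evaluateOpCode(NodeAttributeOpCodeProto op,
      java.lang.String actual, java.lang.String expected) {
    switch (op) {
      case EQ:    return actual.equals(expected);
      case NE:    return !actual.equals(expected);
      case NO_OP: return true;
      default:    return false;
    }
  }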

  /**
   * Protobuf enum {@code hadoop.yarn.ReservationRequestInterpreterProto}
   */
  public enum ReservationRequestInterpreterProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>R_ANY = 0;</code>
     */
    R_ANY(0),
    /**
     * <code>R_ALL = 1;</code>
     */
    R_ALL(1),
    /**
     * <code>R_ORDER = 2;</code>
     */
    R_ORDER(2),
    /**
     * <code>R_ORDER_NO_GAP = 3;</code>
     */
    R_ORDER_NO_GAP(3),
    ;

    /**
     * <code>R_ANY = 0;</code>
     */
    public static final int R_ANY_VALUE = 0;
    /**
     * <code>R_ALL = 1;</code>
     */
    public static final int R_ALL_VALUE = 1;
    /**
     * <code>R_ORDER = 2;</code>
     */
    public static final int R_ORDER_VALUE = 2;
    /**
     * <code>R_ORDER_NO_GAP = 3;</code>
     */
    public static final int R_ORDER_NO_GAP_VALUE = 3;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ReservationRequestInterpreterProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ReservationRequestInterpreterProto forNumber(int value) {
      switch (value) {
        case 0: return R_ANY;
        case 1: return R_ALL;
        case 2: return R_ORDER;
        case 3: return R_ORDER_NO_GAP;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ReservationRequestInterpreterProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ReservationRequestInterpreterProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ReservationRequestInterpreterProto>() {
            public ReservationRequestInterpreterProto findValueByNumber(int number) {
              return ReservationRequestInterpreterProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(22);
    }

    private static final ReservationRequestInterpreterProto[] VALUES = values();

    public static ReservationRequestInterpreterProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ReservationRequestInterpreterProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ReservationRequestInterpreterProto)
  }
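
  // Sketch (semantics assumed from the R_* names: R_ANY is satisfied by any one
  // request, the other interpreters need all of them): does this interpreter
  // require every request in a list to be satisfied?
  private static boolean requiresAllRequests(ReservationRequestInterpreterProto interpreter) {
    return interpreter != ReservationRequestInterpreterProto.R_ANY;
  }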

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerExitStatusProto}
   */
  public enum ContainerExitStatusProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>SUCCESS = 0;</code>
     */
    SUCCESS(0),
    /**
     * <code>INVALID = -1000;</code>
     */
    INVALID(-1000),
    /**
     * <code>ABORTED = -100;</code>
     */
    ABORTED(-100),
    /**
     * <code>DISKS_FAILED = -101;</code>
     */
    DISKS_FAILED(-101),
    ;

    /**
     * <code>SUCCESS = 0;</code>
     */
    public static final int SUCCESS_VALUE = 0;
    /**
     * <code>INVALID = -1000;</code>
     */
    public static final int INVALID_VALUE = -1000;
    /**
     * <code>ABORTED = -100;</code>
     */
    public static final int ABORTED_VALUE = -100;
    /**
     * <code>DISKS_FAILED = -101;</code>
     */
    public static final int DISKS_FAILED_VALUE = -101;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerExitStatusProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerExitStatusProto forNumber(int value) {
      switch (value) {
        case 0: return SUCCESS;
        case -1000: return INVALID;
        case -100: return ABORTED;
        case -101: return DISKS_FAILED;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerExitStatusProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerExitStatusProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerExitStatusProto>() {
            public ContainerExitStatusProto findValueByNumber(int number) {
              return ContainerExitStatusProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(23);
    }

    private static final ContainerExitStatusProto[] VALUES = values();

    public static ContainerExitStatusProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerExitStatusProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerExitStatusProto)
  }
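
  // Sketch (helper is illustrative): exit statuses use negative wire values,
  // which proto2 enums permit; forNumber handles them like any other number.
  private static java.lang.String describeExitStatus(int exitStatus) {
    ContainerExitStatusProto proto = ContainerExitStatusProto.forNumber(exitStatus);
    return proto != null ? proto.name() : "UNKNOWN(" + exitStatus + ")";
  }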

  /**
   * Protobuf enum {@code hadoop.yarn.ContainerRetryPolicyProto}
   */
  public enum ContainerRetryPolicyProto
      implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
    /**
     * <code>NEVER_RETRY = 0;</code>
     */
    NEVER_RETRY(0),
    /**
     * <code>RETRY_ON_ALL_ERRORS = 1;</code>
     */
    RETRY_ON_ALL_ERRORS(1),
    /**
     * <code>RETRY_ON_SPECIFIC_ERROR_CODES = 2;</code>
     */
    RETRY_ON_SPECIFIC_ERROR_CODES(2),
    ;

    /**
     * <code>NEVER_RETRY = 0;</code>
     */
    public static final int NEVER_RETRY_VALUE = 0;
    /**
     * <code>RETRY_ON_ALL_ERRORS = 1;</code>
     */
    public static final int RETRY_ON_ALL_ERRORS_VALUE = 1;
    /**
     * <code>RETRY_ON_SPECIFIC_ERROR_CODES = 2;</code>
     */
    public static final int RETRY_ON_SPECIFIC_ERROR_CODES_VALUE = 2;


    public final int getNumber() {
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ContainerRetryPolicyProto valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ContainerRetryPolicyProto forNumber(int value) {
      switch (value) {
        case 0: return NEVER_RETRY;
        case 1: return RETRY_ON_ALL_ERRORS;
        case 2: return RETRY_ON_SPECIFIC_ERROR_CODES;
        default: return null;
      }
    }

    public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerRetryPolicyProto>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
        ContainerRetryPolicyProto> internalValueMap =
          new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<ContainerRetryPolicyProto>() {
            public ContainerRetryPolicyProto findValueByNumber(int number) {
              return ContainerRetryPolicyProto.forNumber(number);
            }
          };

    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      return getDescriptor().getValues().get(ordinal());
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor().getEnumTypes().get(24);
    }

    private static final ContainerRetryPolicyProto[] VALUES = values();

    public static ContainerRetryPolicyProto valueOf(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      return VALUES[desc.getIndex()];
    }

    private final int value;

    private ContainerRetryPolicyProto(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:hadoop.yarn.ContainerRetryPolicyProto)
  }
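
  // Sketch (assumed retry semantics, for illustration only): decide whether a
  // container exit should be retried under a policy and a set of eligible codes.
  private static boolean shouldRetry(ContainerRetryPolicyProto policy,
      int exitCode, java.util.Set<java.lang.Integer> retryableCodes) {
    switch (policy) {
      case NEVER_RETRY:                   return false;
      case RETRY_ON_ALL_ERRORS:           return exitCode != 0;
      case RETRY_ON_SPECIFIC_ERROR_CODES: return retryableCodes.contains(exitCode);
      default:                            return false;
    }
  }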

  public interface SerializedExceptionProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SerializedExceptionProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string message = 1;</code>
     * @return Whether the message field is set.
     */
    boolean hasMessage();
    /**
     * <code>optional string message = 1;</code>
     * @return The message.
     */
    java.lang.String getMessage();
    /**
     * <code>optional string message = 1;</code>
     * @return The bytes for message.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getMessageBytes();

    /**
     * <code>optional string trace = 2;</code>
     * @return Whether the trace field is set.
     */
    boolean hasTrace();
    /**
     * <code>optional string trace = 2;</code>
     * @return The trace.
     */
    java.lang.String getTrace();
    /**
     * <code>optional string trace = 2;</code>
     * @return The bytes for trace.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTraceBytes();

    /**
     * <code>optional string class_name = 3;</code>
     * @return Whether the className field is set.
     */
    boolean hasClassName();
    /**
     * <code>optional string class_name = 3;</code>
     * @return The className.
     */
    java.lang.String getClassName();
    /**
     * <code>optional string class_name = 3;</code>
     * @return The bytes for className.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getClassNameBytes();

    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     * @return Whether the cause field is set.
     */
    boolean hasCause();
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     * @return The cause.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause();
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder();
  }
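
  // Sketch (helper is illustrative): with proto2 optional fields, callers check
  // hasX() before getX(); an unset string field would otherwise read back as "".
  // Accepting the OrBuilder type lets this work on messages and builders alike.
  private static java.lang.String summarizeException(SerializedExceptionProtoOrBuilder ex) {
    java.lang.String className = ex.hasClassName() ? ex.getClassName() : "<unknown class>";
    java.lang.String message = ex.hasMessage() ? ex.getMessage() : "<no message>";
    return className + ": " + message;
  }
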
  /**
   * Protobuf type {@code hadoop.yarn.SerializedExceptionProto}
   */
  public static final class SerializedExceptionProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SerializedExceptionProto)
      SerializedExceptionProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SerializedExceptionProto.newBuilder() to construct.
    private SerializedExceptionProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SerializedExceptionProto() {
      message_ = "";
      trace_ = "";
      className_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SerializedExceptionProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder.class);
    }

    private int bitField0_;
    public static final int MESSAGE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object message_ = "";
    /**
     * <code>optional string message = 1;</code>
     * @return Whether the message field is set.
     */
    @java.lang.Override
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string message = 1;</code>
     * @return The message.
     */
    @java.lang.Override
    public java.lang.String getMessage() {
      java.lang.Object ref = message_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          message_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string message = 1;</code>
     * @return The bytes for message.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        message_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TRACE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trace_ = "";
    /**
     * <code>optional string trace = 2;</code>
     * @return Whether the trace field is set.
     */
    @java.lang.Override
    public boolean hasTrace() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string trace = 2;</code>
     * @return The trace.
     */
    @java.lang.Override
    public java.lang.String getTrace() {
      java.lang.Object ref = trace_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trace_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string trace = 2;</code>
     * @return The bytes for trace.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTraceBytes() {
      java.lang.Object ref = trace_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trace_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CLASS_NAME_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object className_ = "";
    /**
     * <code>optional string class_name = 3;</code>
     * @return Whether the className field is set.
     */
    @java.lang.Override
    public boolean hasClassName() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string class_name = 3;</code>
     * @return The className.
     */
    @java.lang.Override
    public java.lang.String getClassName() {
      java.lang.Object ref = className_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          className_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string class_name = 3;</code>
     * @return The bytes for className.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getClassNameBytes() {
      java.lang.Object ref = className_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        className_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CAUSE_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto cause_;
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     * @return Whether the cause field is set.
     */
    @java.lang.Override
    public boolean hasCause() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     * @return The cause.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause() {
      return cause_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_;
    }
    /**
     * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder() {
      return cause_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_;
    }
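
    // Sketch (helper is illustrative): the cause field nests this same message
    // type, so a chain of causes can be walked much like Throwable.getCause();
    // getCause() on an unset field returns the default instance, hence the
    // hasCause() guard.
    private static int causeChainDepth(SerializedExceptionProto ex) {
      int depth = 0;
      SerializedExceptionProto current = ex;
      while (current.hasCause()) {
        depth++;
        current = current.getCause();
      }
      return depth;
    }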

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, message_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, trace_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, className_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getCause());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, message_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, trace_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, className_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getCause());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto other = (org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto) obj;

      if (hasMessage() != other.hasMessage()) return false;
      if (hasMessage()) {
        if (!getMessage()
            .equals(other.getMessage())) return false;
      }
      if (hasTrace() != other.hasTrace()) return false;
      if (hasTrace()) {
        if (!getTrace()
            .equals(other.getTrace())) return false;
      }
      if (hasClassName() != other.hasClassName()) return false;
      if (hasClassName()) {
        if (!getClassName()
            .equals(other.getClassName())) return false;
      }
      if (hasCause() != other.hasCause()) return false;
      if (hasCause()) {
        if (!getCause()
            .equals(other.getCause())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasMessage()) {
        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + getMessage().hashCode();
      }
      if (hasTrace()) {
        hash = (37 * hash) + TRACE_FIELD_NUMBER;
        hash = (53 * hash) + getTrace().hashCode();
      }
      if (hasClassName()) {
        hash = (37 * hash) + CLASS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getClassName().hashCode();
      }
      if (hasCause()) {
        hash = (37 * hash) + CAUSE_FIELD_NUMBER;
        hash = (53 * hash) + getCause().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
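
    // Sketch of a typical round trip (helper is illustrative): serialize with
    // the inherited toByteArray() and parse back; parseFrom throws
    // InvalidProtocolBufferException on malformed input.
    private static SerializedExceptionProto roundTrip(SerializedExceptionProto original)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      byte[] wire = original.toByteArray();
      return SerializedExceptionProto.parseFrom(wire);
    }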

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
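
    // Minimal builder sketch (field values are placeholders, for illustration):
    // set optional fields and build(); unset fields keep their defaults and
    // their hasX() accessors report false on the built message.
    private static SerializedExceptionProto exampleSerializedException() {
      return SerializedExceptionProto.newBuilder()
          .setClassName("java.io.IOException")
          .setMessage("disk failed")
          .build();
    }
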
    /**
     * Protobuf type {@code hadoop.yarn.SerializedExceptionProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SerializedExceptionProto)
        org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getCauseFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        message_ = "";
        trace_ = "";
        className_ = "";
        cause_ = null;
        if (causeBuilder_ != null) {
          causeBuilder_.dispose();
          causeBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SerializedExceptionProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.message_ = message_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.trace_ = trace_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.className_ = className_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.cause_ = causeBuilder_ == null
              ? cause_
              : causeBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) return this;
        if (other.hasMessage()) {
          message_ = other.message_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasTrace()) {
          trace_ = other.trace_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasClassName()) {
          className_ = other.className_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasCause()) {
          mergeCause(other.getCause());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                message_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                trace_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                className_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getCauseFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object message_ = "";
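      // message_ holds either a decoded java.lang.String or the raw wire
      // ByteString; getMessage() decodes UTF-8 on first use (memoizing the
      // String only when the bytes are valid UTF-8), and getMessageBytes()
      // converts back on demand.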
      /**
       * <code>optional string message = 1;</code>
       * @return Whether the message field is set.
       */
      public boolean hasMessage() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string message = 1;</code>
       * @return The message.
       */
      public java.lang.String getMessage() {
        java.lang.Object ref = message_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            message_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string message = 1;</code>
       * @return The bytes for message.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getMessageBytes() {
        java.lang.Object ref = message_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          message_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string message = 1;</code>
       * @param value The message to set.
       * @return This builder for chaining.
       */
      public Builder setMessage(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        message_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string message = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearMessage() {
        message_ = getDefaultInstance().getMessage();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string message = 1;</code>
       * @param value The bytes for message to set.
       * @return This builder for chaining.
       */
      public Builder setMessageBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        message_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object trace_ = "";
      /**
       * <code>optional string trace = 2;</code>
       * @return Whether the trace field is set.
       */
      public boolean hasTrace() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string trace = 2;</code>
       * @return The trace.
       */
      public java.lang.String getTrace() {
        java.lang.Object ref = trace_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trace_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string trace = 2;</code>
       * @return The bytes for trace.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTraceBytes() {
        java.lang.Object ref = trace_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trace_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string trace = 2;</code>
       * @param value The trace to set.
       * @return This builder for chaining.
       */
      public Builder setTrace(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trace_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string trace = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrace() {
        trace_ = getDefaultInstance().getTrace();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string trace = 2;</code>
       * @param value The bytes for trace to set.
       * @return This builder for chaining.
       */
      public Builder setTraceBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trace_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object className_ = "";
      /**
       * <code>optional string class_name = 3;</code>
       * @return Whether the className field is set.
       */
      public boolean hasClassName() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string class_name = 3;</code>
       * @return The className.
       */
      public java.lang.String getClassName() {
        java.lang.Object ref = className_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            className_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string class_name = 3;</code>
       * @return The bytes for className.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getClassNameBytes() {
        java.lang.Object ref = className_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          className_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string class_name = 3;</code>
       * @param value The className to set.
       * @return This builder for chaining.
       */
      public Builder setClassName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        className_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string class_name = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearClassName() {
        className_ = getDefaultInstance().getClassName();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string class_name = 3;</code>
       * @param value The bytes for className to set.
       * @return This builder for chaining.
       */
      public Builder setClassNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        className_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto cause_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> causeBuilder_;
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       * @return Whether the cause field is set.
       */
      public boolean hasCause() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       * @return The cause.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getCause() {
        if (causeBuilder_ == null) {
          return cause_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_;
        } else {
          return causeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public Builder setCause(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) {
        if (causeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          cause_ = value;
        } else {
          causeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public Builder setCause(
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder builderForValue) {
        if (causeBuilder_ == null) {
          cause_ = builderForValue.build();
        } else {
          causeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public Builder mergeCause(org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto value) {
        if (causeBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            cause_ != null &&
            cause_ != org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance()) {
            getCauseBuilder().mergeFrom(value);
          } else {
            cause_ = value;
          }
        } else {
          causeBuilder_.mergeFrom(value);
        }
        if (cause_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public Builder clearCause() {
        bitField0_ = (bitField0_ & ~0x00000008);
        cause_ = null;
        if (causeBuilder_ != null) {
          causeBuilder_.dispose();
          causeBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder getCauseBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getCauseFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder getCauseOrBuilder() {
        if (causeBuilder_ != null) {
          return causeBuilder_.getMessageOrBuilder();
        } else {
          return cause_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.getDefaultInstance() : cause_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.SerializedExceptionProto cause = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder> 
          getCauseFieldBuilder() {
        if (causeBuilder_ == null) {
          causeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProtoOrBuilder>(
                  getCause(),
                  getParentForChildren(),
                  isClean());
          cause_ = null;
        }
        return causeBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SerializedExceptionProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SerializedExceptionProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SerializedExceptionProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SerializedExceptionProto>() {
      @java.lang.Override
      public SerializedExceptionProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SerializedExceptionProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SerializedExceptionProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SerializedExceptionProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
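
  // Usage sketch: SerializedExceptionProto mirrors a Throwable chain, nesting
  // recursively through its `cause` field. Field values below are
  // illustrative only.
  //
  //   SerializedExceptionProto cause = SerializedExceptionProto.newBuilder()
  //       .setClassName("java.io.IOException")
  //       .setMessage("No space left on device")
  //       .build();
  //   SerializedExceptionProto ex = SerializedExceptionProto.newBuilder()
  //       .setClassName("org.apache.hadoop.yarn.exceptions.YarnException")
  //       .setMessage("Container launch failed")
  //       .setCause(cause)
  //       .build();
  //   SerializedExceptionProto parsed =
  //       SerializedExceptionProto.parseFrom(ex.toByteArray());
  //   // parsed.getCause().getClassName() -> "java.io.IOException"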

  public interface ApplicationIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional int32 id = 1;</code>
     * @return The id.
     */
    int getId();

    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return Whether the clusterTimestamp field is set.
     */
    boolean hasClusterTimestamp();
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return The clusterTimestamp.
     */
    long getClusterTimestamp();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationIdProto}
   */
  public static final class ApplicationIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationIdProto)
      ApplicationIdProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationIdProto.newBuilder() to construct.
    private ApplicationIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationIdProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationIdProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private int id_ = 0;
    /**
     * <code>optional int32 id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public int getId() {
      return id_;
    }

    public static final int CLUSTER_TIMESTAMP_FIELD_NUMBER = 2;
    private long clusterTimestamp_ = 0L;
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return Whether the clusterTimestamp field is set.
     */
    @java.lang.Override
    public boolean hasClusterTimestamp() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return The clusterTimestamp.
     */
    @java.lang.Override
    public long getClusterTimestamp() {
      return clusterTimestamp_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, clusterTimestamp_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, clusterTimestamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
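
    // Size arithmetic example: with only id = 42 set, getSerializedSize()
    // returns 2 (the one-byte tag 0x08 for field 1 / wire type 0 plus a
    // one-byte varint payload); a negative int32 costs ten payload bytes
    // because it is sign-extended to 64 bits on the wire.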

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (getId()
            != other.getId()) return false;
      }
      if (hasClusterTimestamp() != other.hasClusterTimestamp()) return false;
      if (hasClusterTimestamp()) {
        if (getClusterTimestamp()
            != other.getClusterTimestamp()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      if (hasClusterTimestamp()) {
        hash = (37 * hash) + CLUSTER_TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getClusterTimestamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = 0;
        clusterTimestamp_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = id_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.clusterTimestamp_ = clusterTimestamp_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasClusterTimestamp()) {
          setClusterTimestamp(other.getClusterTimestamp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                id_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                clusterTimestamp_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int id_ ;
      /**
       * <code>optional int32 id = 1;</code>
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @return The id.
       */
      @java.lang.Override
      public int getId() {
        return id_;
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(int value) {
        id_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        onChanged();
        return this;
      }

      private long clusterTimestamp_ ;
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return Whether the clusterTimestamp field is set.
       */
      @java.lang.Override
      public boolean hasClusterTimestamp() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return The clusterTimestamp.
       */
      @java.lang.Override
      public long getClusterTimestamp() {
        return clusterTimestamp_;
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @param value The clusterTimestamp to set.
       * @return This builder for chaining.
       */
      public Builder setClusterTimestamp(long value) {
        clusterTimestamp_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearClusterTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        clusterTimestamp_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationIdProto>() {
      @java.lang.Override
      public ApplicationIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
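
  // Usage sketch: an ApplicationIdProto is the (cluster_timestamp, id) pair
  // behind YARN application IDs such as application_1700000000000_0042.
  // Values below are illustrative.
  //
  //   ApplicationIdProto appId = ApplicationIdProto.newBuilder()
  //       .setClusterTimestamp(1700000000000L)
  //       .setId(42)
  //       .build();
  //   byte[] wire = appId.toByteArray();
  //   ApplicationIdProto parsed = ApplicationIdProto.parseFrom(wire);
  //   // parsed.hasId() && parsed.getId() == 42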

  public interface ApplicationAttemptIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional int32 attemptId = 2;</code>
     * @return Whether the attemptId field is set.
     */
    boolean hasAttemptId();
    /**
     * <code>optional int32 attemptId = 2;</code>
     * @return The attemptId.
     */
    int getAttemptId();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptIdProto}
   */
  public static final class ApplicationAttemptIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptIdProto)
      ApplicationAttemptIdProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptIdProto.newBuilder() to construct.
    private ApplicationAttemptIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationAttemptIdProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationAttemptIdProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int ATTEMPTID_FIELD_NUMBER = 2;
    private int attemptId_ = 0;
    /**
     * <code>optional int32 attemptId = 2;</code>
     * @return Whether the attemptId field is set.
     */
    @java.lang.Override
    public boolean hasAttemptId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 attemptId = 2;</code>
     * @return The attemptId.
     */
    @java.lang.Override
    public int getAttemptId() {
      return attemptId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, attemptId_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, attemptId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasAttemptId() != other.hasAttemptId()) return false;
      if (hasAttemptId()) {
        if (getAttemptId()
            != other.getAttemptId()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasAttemptId()) {
        hash = (37 * hash) + ATTEMPTID_FIELD_NUMBER;
        hash = (53 * hash) + getAttemptId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
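
      // alwaysUseFieldBuilders is false in normal operation; protobuf's test
      // harness flips it to force the nested-builder code path, eagerly
      // creating the applicationId field builder here.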
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        attemptId_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attemptId_ = attemptId_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasAttemptId()) {
          setAttemptId(other.getAttemptId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                attemptId_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private int attemptId_;
      /**
       * <code>optional int32 attemptId = 2;</code>
       * @return Whether the attemptId field is set.
       */
      @java.lang.Override
      public boolean hasAttemptId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 attemptId = 2;</code>
       * @return The attemptId.
       */
      @java.lang.Override
      public int getAttemptId() {
        return attemptId_;
      }
      /**
       * <code>optional int32 attemptId = 2;</code>
       * @param value The attemptId to set.
       * @return This builder for chaining.
       */
      public Builder setAttemptId(int value) {
        attemptId_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 attemptId = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        attemptId_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationAttemptIdProto>() {
      @java.lang.Override
      public ApplicationAttemptIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
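
  // -------------------------------------------------------------------
  // Editorial illustration, not emitted by protoc: a minimal sketch of
  // how the ApplicationAttemptIdProto builder above is typically used.
  // The timestamp and id values are hypothetical; ApplicationIdProto is
  // assumed to expose the standard generated setters for its id and
  // cluster_timestamp fields.
  // -------------------------------------------------------------------
  private static ApplicationAttemptIdProto exampleApplicationAttemptId() {
    ApplicationIdProto appId = ApplicationIdProto.newBuilder()
        .setClusterTimestamp(1700000000000L) // hypothetical cluster start time
        .setId(42)                           // hypothetical application number
        .build();
    // attemptId distinguishes successive attempts of the same application.
    return ApplicationAttemptIdProto.newBuilder()
        .setApplicationId(appId)
        .setAttemptId(1)
        .build();
  }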

  public interface ContainerIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     * @return Whether the appId field is set.
     */
    boolean hasAppId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     * @return The appId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     * @return Whether the appAttemptId field is set.
     */
    boolean hasAppAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     * @return The appAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder();

    /**
     * <code>optional int64 id = 3;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional int64 id = 3;</code>
     * @return The id.
     */
    long getId();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerIdProto}
   */
  public static final class ContainerIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerIdProto)
      ContainerIdProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerIdProto.newBuilder() to construct.
    private ContainerIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerIdProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerIdProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int APP_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     * @return Whether the appId field is set.
     */
    @java.lang.Override
    public boolean hasAppId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     * @return The appId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
      return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
      return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
    }

    public static final int APP_ATTEMPT_ID_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     * @return Whether the appAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasAppAttemptId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     * @return The appAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
      return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
      return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
    }

    public static final int ID_FIELD_NUMBER = 3;
    private long id_ = 0L;
    /**
     * <code>optional int64 id = 3;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 id = 3;</code>
     * @return The id.
     */
    @java.lang.Override
    public long getId() {
      return id_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getAppId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getAppAttemptId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, id_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getAppId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getAppAttemptId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, id_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto) obj;

      if (hasAppId() != other.hasAppId()) return false;
      if (hasAppId()) {
        if (!getAppId()
            .equals(other.getAppId())) return false;
      }
      if (hasAppAttemptId() != other.hasAppAttemptId()) return false;
      if (hasAppAttemptId()) {
        if (!getAppAttemptId()
            .equals(other.getAppAttemptId())) return false;
      }
      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (getId()
            != other.getId()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAppId()) {
        hash = (37 * hash) + APP_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAppId().hashCode();
      }
      if (hasAppAttemptId()) {
        hash = (37 * hash) + APP_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAppAttemptId().hashCode();
      }
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getAppIdFieldBuilder();
          getAppAttemptIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        appId_ = null;
        if (appIdBuilder_ != null) {
          appIdBuilder_.dispose();
          appIdBuilder_ = null;
        }
        appAttemptId_ = null;
        if (appAttemptIdBuilder_ != null) {
          appAttemptIdBuilder_.dispose();
          appAttemptIdBuilder_ = null;
        }
        id_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.appId_ = appIdBuilder_ == null
              ? appId_
              : appIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.appAttemptId_ = appAttemptIdBuilder_ == null
              ? appAttemptId_
              : appAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.id_ = id_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) return this;
        if (other.hasAppId()) {
          mergeAppId(other.getAppId());
        }
        if (other.hasAppAttemptId()) {
          mergeAppAttemptId(other.getAppAttemptId());
        }
        if (other.hasId()) {
          setId(other.getId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getAppIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getAppAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                id_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto appId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> appIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       * @return Whether the appId field is set.
       */
      public boolean hasAppId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       * @return The appId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getAppId() {
        if (appIdBuilder_ == null) {
          return appId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
        } else {
          return appIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public Builder setAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appId_ = value;
        } else {
          appIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public Builder setAppId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (appIdBuilder_ == null) {
          appId_ = builderForValue.build();
        } else {
          appIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public Builder mergeAppId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (appIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            appId_ != null &&
            appId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getAppIdBuilder().mergeFrom(value);
          } else {
            appId_ = value;
          }
        } else {
          appIdBuilder_.mergeFrom(value);
        }
        if (appId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public Builder clearAppId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        appId_ = null;
        if (appIdBuilder_ != null) {
          appIdBuilder_.dispose();
          appIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getAppIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAppIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getAppIdOrBuilder() {
        if (appIdBuilder_ != null) {
          return appIdBuilder_.getMessageOrBuilder();
        } else {
          return appId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : appId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto app_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getAppIdFieldBuilder() {
        if (appIdBuilder_ == null) {
          appIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getAppId(),
                  getParentForChildren(),
                  isClean());
          appId_ = null;
        }
        return appIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto appAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> appAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       * @return Whether the appAttemptId field is set.
       */
      public boolean hasAppAttemptId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       * @return The appAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getAppAttemptId() {
        if (appAttemptIdBuilder_ == null) {
          return appAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
        } else {
          return appAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public Builder setAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (appAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appAttemptId_ = value;
        } else {
          appAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public Builder setAppAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (appAttemptIdBuilder_ == null) {
          appAttemptId_ = builderForValue.build();
        } else {
          appAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public Builder mergeAppAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (appAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            appAttemptId_ != null &&
            appAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getAppAttemptIdBuilder().mergeFrom(value);
          } else {
            appAttemptId_ = value;
          }
        } else {
          appAttemptIdBuilder_.mergeFrom(value);
        }
        if (appAttemptId_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public Builder clearAppAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        appAttemptId_ = null;
        if (appAttemptIdBuilder_ != null) {
          appAttemptIdBuilder_.dispose();
          appAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getAppAttemptIdBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getAppAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getAppAttemptIdOrBuilder() {
        if (appAttemptIdBuilder_ != null) {
          return appAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return appAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : appAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto app_attempt_id = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getAppAttemptIdFieldBuilder() {
        if (appAttemptIdBuilder_ == null) {
          appAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getAppAttemptId(),
                  getParentForChildren(),
                  isClean());
          appAttemptId_ = null;
        }
        return appAttemptIdBuilder_;
      }

      private long id_;
      /**
       * <code>optional int64 id = 3;</code>
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 id = 3;</code>
       * @return The id.
       */
      @java.lang.Override
      public long getId() {
        return id_;
      }
      /**
       * <code>optional int64 id = 3;</code>
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(long value) {
        id_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 id = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        id_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerIdProto>() {
      @java.lang.Override
      public ContainerIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
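
  // -------------------------------------------------------------------
  // Editorial illustration, not emitted by protoc: a minimal sketch of a
  // ContainerIdProto build-and-parse round trip through the generated
  // parseFrom(byte[]) overload above. All numeric identifiers here are
  // hypothetical placeholder values.
  // -------------------------------------------------------------------
  private static ContainerIdProto exampleContainerIdRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    ContainerIdProto containerId = ContainerIdProto.newBuilder()
        .setAppAttemptId(ApplicationAttemptIdProto.newBuilder()
            .setApplicationId(ApplicationIdProto.newBuilder()
                .setClusterTimestamp(1700000000000L)
                .setId(42))
            .setAttemptId(1))
        .setId(7L) // container number within the attempt
        .build();
    // Serialize to the wire format and parse it back; the generated
    // equals() would report the two messages as equal.
    byte[] wire = containerId.toByteArray();
    return ContainerIdProto.parseFrom(wire);
  }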

  public interface ResourceInformationProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceInformationProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>optional int64 value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>optional int64 value = 2;</code>
     * @return The value.
     */
    long getValue();

    /**
     * <code>optional string units = 3;</code>
     * @return Whether the units field is set.
     */
    boolean hasUnits();
    /**
     * <code>optional string units = 3;</code>
     * @return The units.
     */
    java.lang.String getUnits();
    /**
     * <code>optional string units = 3;</code>
     * @return The bytes for units.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUnitsBytes();

    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
     * @return Whether the type field is set.
     */
    boolean hasType();
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
     * @return The type.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType();

    /**
     * <code>repeated string tags = 5;</code>
     * @return A list containing the tags.
     */
    java.util.List<java.lang.String>
        getTagsList();
    /**
     * <code>repeated string tags = 5;</code>
     * @return The count of tags.
     */
    int getTagsCount();
    /**
     * <code>repeated string tags = 5;</code>
     * @param index The index of the element to return.
     * @return The tags at the given index.
     */
    java.lang.String getTags(int index);
    /**
     * <code>repeated string tags = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the tags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTagsBytes(int index);

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getAttributesList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    int getAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(
        int index);
  }
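
  // -------------------------------------------------------------------
  // Editorial illustration, not emitted by protoc: a minimal sketch of
  // the defensive read pattern for ResourceInformationProtoOrBuilder.
  // Only "key" is required, so the optional fields are guarded with
  // their has*() accessors before being read. The units string in the
  // comment is a hypothetical example value.
  // -------------------------------------------------------------------
  private static java.lang.String exampleDescribeResource(
      ResourceInformationProtoOrBuilder info) {
    java.lang.StringBuilder sb = new java.lang.StringBuilder(info.getKey());
    if (info.hasValue()) {
      sb.append('=').append(info.getValue());
    }
    if (info.hasUnits()) {
      sb.append(info.getUnits()); // e.g. "Mi" for memory-like resources
    }
    if (info.hasType()) {
      sb.append(" (").append(info.getType()).append(')');
    }
    return sb.toString();
  }
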
  /**
   * Protobuf type {@code hadoop.yarn.ResourceInformationProto}
   */
  public static final class ResourceInformationProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceInformationProto)
      ResourceInformationProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceInformationProto.newBuilder() to construct.
    private ResourceInformationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceInformationProto() {
      key_ = "";
      units_ = "";
      type_ = 0;
      tags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      attributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceInformationProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    private long value_ = 0L;
    /**
     * <code>optional int64 value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public long getValue() {
      return value_;
    }

    public static final int UNITS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object units_ = "";
    /**
     * <code>optional string units = 3;</code>
     * @return Whether the units field is set.
     */
    @java.lang.Override
    public boolean hasUnits() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string units = 3;</code>
     * @return The units.
     */
    @java.lang.Override
    public java.lang.String getUnits() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          units_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string units = 3;</code>
     * @return The bytes for units.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUnitsBytes() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        units_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TYPE_FIELD_NUMBER = 4;
    private int type_ = 0;
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
     * @return Whether the type field is set.
     */
    @java.lang.Override public boolean hasType() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
     * @return The type.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
    }

    public static final int TAGS_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList tags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string tags = 5;</code>
     * @return A list containing the tags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getTagsList() {
      return tags_;
    }
    /**
     * <code>repeated string tags = 5;</code>
     * @return The count of tags.
     */
    public int getTagsCount() {
      return tags_.size();
    }
    /**
     * <code>repeated string tags = 5;</code>
     * @param index The index of the element to return.
     * @return The tags at the given index.
     */
    public java.lang.String getTags(int index) {
      return tags_.get(index);
    }
    /**
     * <code>repeated string tags = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the tags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTagsBytes(int index) {
      return tags_.getByteString(index);
    }

    public static final int ATTRIBUTES_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> attributes_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getAttributesList() {
      return attributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getAttributesOrBuilderList() {
      return attributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    @java.lang.Override
    public int getAttributesCount() {
      return attributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index) {
      return attributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(
        int index) {
      return attributes_.get(index);
    }
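
    // memoizedIsInitialized caches the required-field check: -1 = not yet
    // computed, 0 = missing a required field, 1 = fully initialized. Only
    // key (required string key = 1) can make this message uninitialized.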

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
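
    // Serialization writes fields in ascending field-number order; each
    // singular field is guarded by its presence bit, so unset optionals
    // contribute zero bytes to the output.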

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, value_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, units_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(4, type_);
      }
      for (int i = 0; i < tags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, tags_.getRaw(i));
      }
      for (int i = 0; i < attributes_.size(); i++) {
        output.writeMessage(6, attributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
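
    // getSerializedSize() memoizes its result in memoizedSize (-1 means
    // "not yet computed"). For the repeated string tags, the size is the sum
    // of the length-delimited payloads plus one byte per element, since the
    // tag byte for field 5 (value 42) fits in a single varint byte.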

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, value_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, units_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(4, type_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < tags_.size(); i++) {
          dataSize += computeStringSizeNoTag(tags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getTagsList().size();
      }
      for (int i = 0; i < attributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, attributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (getValue()
            != other.getValue()) return false;
      }
      if (hasUnits() != other.hasUnits()) return false;
      if (hasUnits()) {
        if (!getUnits()
            .equals(other.getUnits())) return false;
      }
      if (hasType() != other.hasType()) return false;
      if (hasType()) {
        if (type_ != other.type_) return false;
      }
      if (!getTagsList()
          .equals(other.getTagsList())) return false;
      if (!getAttributesList()
          .equals(other.getAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
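
    // hashCode() is memoized with 0 as the "not yet computed" sentinel; a
    // hash that genuinely lands on 0 is simply recomputed on each call.
    // The 37/53 prime mixing covers exactly the fields compared in equals().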

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getValue());
      }
      if (hasUnits()) {
        hash = (37 * hash) + UNITS_FIELD_NUMBER;
        hash = (53 * hash) + getUnits().hashCode();
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + type_;
      }
      if (getTagsCount() > 0) {
        hash = (37 * hash) + TAGS_FIELD_NUMBER;
        hash = (53 * hash) + getTagsList().hashCode();
      }
      if (getAttributesCount() > 0) {
        hash = (37 * hash) + ATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
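
    // Typical round-trip through the builder and parseFrom() API (an
    // illustrative sketch; the key, units, and tag values below are
    // examples, not defaults):
    //
    //   ResourceInformationProto proto = ResourceInformationProto.newBuilder()
    //       .setKey("memory-mb")   // required: build() throws if unset
    //       .setValue(4096L)
    //       .setUnits("Mi")
    //       .setType(ResourceTypesProto.COUNTABLE)
    //       .addTags("example-tag")
    //       .build();
    //   byte[] bytes = proto.toByteArray();
    //   ResourceInformationProto copy = ResourceInformationProto.parseFrom(bytes);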

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceInformationProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceInformationProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = 0L;
        units_ = "";
        type_ = 0;
        tags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        if (attributesBuilder_ == null) {
          attributes_ = java.util.Collections.emptyList();
        } else {
          attributes_ = null;
          attributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result) {
        if (attributesBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            attributes_ = java.util.Collections.unmodifiableList(attributes_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.attributes_ = attributes_;
        } else {
          result.attributes_ = attributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.units_ = units_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.type_ = type_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          tags_.makeImmutable();
          result.tags_ = tags_;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
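
      // Merge semantics: singular fields set in the other message overwrite
      // this builder's values (string fields copy the raw Object to avoid
      // re-decoding); repeated fields concatenate rather than replace.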

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        if (other.hasUnits()) {
          units_ = other.units_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (!other.tags_.isEmpty()) {
          if (tags_.isEmpty()) {
            tags_ = other.tags_;
            bitField0_ |= 0x00000010;
          } else {
            ensureTagsIsMutable();
            tags_.addAll(other.tags_);
          }
          onChanged();
        }
        if (attributesBuilder_ == null) {
          if (!other.attributes_.isEmpty()) {
            if (attributes_.isEmpty()) {
              attributes_ = other.attributes_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureAttributesIsMutable();
              attributes_.addAll(other.attributes_);
            }
            onChanged();
          }
        } else {
          if (!other.attributes_.isEmpty()) {
            if (attributesBuilder_.isEmpty()) {
              attributesBuilder_.dispose();
              attributesBuilder_ = null;
              attributes_ = other.attributes_;
              bitField0_ = (bitField0_ & ~0x00000020);
              attributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAttributesFieldBuilder() : null;
            } else {
              attributesBuilder_.addAllMessages(other.attributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasKey()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
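              // Wire tags are (field_number << 3) | wire_type:
              //   10 = (1 << 3) | 2 -> field 1, length-delimited (key)
              //   16 = (2 << 3) | 0 -> field 2, varint (value)
              //   26 = (3 << 3) | 2 -> field 3, length-delimited (units)
              //   32 = (4 << 3) | 0 -> field 4, varint (type)
              //   42 = (5 << 3) | 2 -> field 5, length-delimited (tags)
              //   50 = (6 << 3) | 2 -> field 6, length-delimited (attributes)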
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                value_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                units_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(4, tmpRaw);
                } else {
                  type_ = tmpRaw;
                  bitField0_ |= 0x00000008;
                }
                break;
              } // case 32
              case 42: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureTagsIsMutable();
                tags_.add(bs);
                break;
              } // case 42
              case 50: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (attributesBuilder_ == null) {
                  ensureAttributesIsMutable();
                  attributes_.add(m);
                } else {
                  attributesBuilder_.addMessage(m);
                }
                break;
              } // case 50
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
      /**
       * <code>required string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private long value_ ;
      /**
       * <code>optional int64 value = 2;</code>
       * @return Whether the value field is set.
       */
      @java.lang.Override
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 value = 2;</code>
       * @return The value.
       */
      @java.lang.Override
      public long getValue() {
        return value_;
      }
      /**
       * <code>optional int64 value = 2;</code>
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(long value) {
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = 0L;
        onChanged();
        return this;
      }

      private java.lang.Object units_ = "";
      /**
       * <code>optional string units = 3;</code>
       * @return Whether the units field is set.
       */
      public boolean hasUnits() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string units = 3;</code>
       * @return The units.
       */
      public java.lang.String getUnits() {
        java.lang.Object ref = units_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            units_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string units = 3;</code>
       * @return The bytes for units.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUnitsBytes() {
        java.lang.Object ref = units_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          units_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string units = 3;</code>
       * @param value The units to set.
       * @return This builder for chaining.
       */
      public Builder setUnits(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        units_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string units = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearUnits() {
        units_ = getDefaultInstance().getUnits();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string units = 3;</code>
       * @param value The bytes for units to set.
       * @return This builder for chaining.
       */
      public Builder setUnitsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        units_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private int type_ = 0;
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
       * @return Whether the type field is set.
       */
      @java.lang.Override public boolean hasType() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
       * @return The type.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
       * @param value The type to set.
       * @return This builder for chaining.
       */
      public Builder setType(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        type_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000008);
        type_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList tags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
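      // tags_ is copy-on-write: a shared list (e.g. one adopted from another
      // message in mergeFrom) is cloned here before the first mutation, and
      // getTagsList() freezes the list before exposing it to callers.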
      private void ensureTagsIsMutable() {
        if (!tags_.isModifiable()) {
          tags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(tags_);
        }
        bitField0_ |= 0x00000010;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @return A list containing the tags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getTagsList() {
        tags_.makeImmutable();
        return tags_;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @return The count of tags.
       */
      public int getTagsCount() {
        return tags_.size();
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param index The index of the element to return.
       * @return The tags at the given index.
       */
      public java.lang.String getTags(int index) {
        return tags_.get(index);
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param index The index of the value to return.
       * @return The bytes of the tags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTagsBytes(int index) {
        return tags_.getByteString(index);
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param index The index to set the value at.
       * @param value The tags to set.
       * @return This builder for chaining.
       */
      public Builder setTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTagsIsMutable();
        tags_.set(index, value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param value The tags to add.
       * @return This builder for chaining.
       */
      public Builder addTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTagsIsMutable();
        tags_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param values The tags to add.
       * @return This builder for chaining.
       */
      public Builder addAllTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, tags_);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearTags() {
        tags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string tags = 5;</code>
       * @param value The bytes of the tags to add.
       * @return This builder for chaining.
       */
      public Builder addTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTagsIsMutable();
        tags_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> attributes_ =
        java.util.Collections.emptyList();
      private void ensureAttributesIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          attributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(attributes_);
          bitField0_ |= 0x00000020;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> attributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getAttributesList() {
        if (attributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(attributes_);
        } else {
          return attributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public int getAttributesCount() {
        if (attributesBuilder_ == null) {
          return attributes_.size();
        } else {
          return attributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getAttributes(int index) {
        if (attributesBuilder_ == null) {
          return attributes_.get(index);
        } else {
          return attributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder setAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.set(index, value);
          onChanged();
        } else {
          attributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder setAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder addAttributes(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.add(value);
          onChanged();
        } else {
          attributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder addAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (attributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAttributesIsMutable();
          attributes_.add(index, value);
          onChanged();
        } else {
          attributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder addAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.add(builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder addAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          attributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder addAllAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, attributes_);
          onChanged();
        } else {
          attributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder clearAttributes() {
        if (attributesBuilder_ == null) {
          attributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          attributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public Builder removeAttributes(int index) {
        if (attributesBuilder_ == null) {
          ensureAttributesIsMutable();
          attributes_.remove(index);
          onChanged();
        } else {
          attributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getAttributesBuilder(
          int index) {
        return getAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getAttributesOrBuilder(
          int index) {
        if (attributesBuilder_ == null) {
          return attributes_.get(index);
        } else {
          return attributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getAttributesOrBuilderList() {
        if (attributesBuilder_ != null) {
          return attributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(attributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addAttributesBuilder() {
        return getAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addAttributesBuilder(
          int index) {
        return getAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto attributes = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getAttributesBuilderList() {
        return getAttributesFieldBuilder().getBuilderList();
      }
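
      // The attributes field operates in one of two modes: a plain List
      // until the first builder-style access, after which this
      // RepeatedFieldBuilderV3 takes ownership (attributes_ becomes null)
      // and all reads and writes are routed through it.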
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getAttributesFieldBuilder() {
        if (attributesBuilder_ == null) {
          attributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  attributes_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          attributes_ = null;
        }
        return attributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceInformationProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceInformationProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
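
    // Direct use of the PARSER field is deprecated; parser() below is the
    // supported accessor. parsePartialFrom() may produce a message missing
    // its required key, which is attached to thrown exceptions via
    // setUnfinishedMessage() so callers can inspect the partial result.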

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceInformationProto>() {
      @java.lang.Override
      public ResourceInformationProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceInformationProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ResourceTypeInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceTypeInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * <code>optional string units = 2;</code>
     * @return Whether the units field is set.
     */
    boolean hasUnits();
    /**
     * <code>optional string units = 2;</code>
     * @return The units.
     */
    java.lang.String getUnits();
    /**
     * <code>optional string units = 2;</code>
     * @return The bytes for units.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUnitsBytes();

    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
     * @return Whether the type field is set.
     */
    boolean hasType();
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
     * @return The type.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceTypeInfoProto}
   */
  public static final class ResourceTypeInfoProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceTypeInfoProto)
      ResourceTypeInfoProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceTypeInfoProto.newBuilder() to construct.
    private ResourceTypeInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceTypeInfoProto() {
      name_ = "";
      units_ = "";
      type_ = 0;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceTypeInfoProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int UNITS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object units_ = "";
    /**
     * <code>optional string units = 2;</code>
     * @return Whether the units field is set.
     */
    @java.lang.Override
    public boolean hasUnits() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string units = 2;</code>
     * @return The units.
     */
    @java.lang.Override
    public java.lang.String getUnits() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          units_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string units = 2;</code>
     * @return The bytes for units.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUnitsBytes() {
      java.lang.Object ref = units_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        units_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TYPE_FIELD_NUMBER = 3;
    private int type_ = 0;
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
     * @return Whether the type field is set.
     */
    @java.lang.Override public boolean hasType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
     * @return The type.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
    }
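
    // ResourceTypeInfoProto follows the same generated pattern as
    // ResourceInformationProto above, with required name = 1, optional
    // units = 2, and optional type = 3; having no repeated fields, its
    // writeTo/size/equals/hashCode cover just these three singular fields.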

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, units_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeEnum(3, type_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, units_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(3, type_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto) obj;

      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasUnits() != other.hasUnits()) return false;
      if (hasUnits()) {
        if (!getUnits()
            .equals(other.getUnits())) return false;
      }
      if (hasType() != other.hasType()) return false;
      if (hasType()) {
        if (type_ != other.type_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasUnits()) {
        hash = (37 * hash) + UNITS_FIELD_NUMBER;
        hash = (53 * hash) + getUnits().hashCode();
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + type_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

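    // Illustrative builder usage (a sketch; the resource name and units
    // below are assumed example values, not taken from this file):
    //
    //   ResourceTypeInfoProto info = ResourceTypeInfoProto.newBuilder()
    //       .setName("memory-mb")                  // required string name = 1
    //       .setUnits("Mi")                        // optional string units = 2
    //       .setType(ResourceTypesProto.COUNTABLE) // optional enum type = 3
    //       .build(); // build() throws if the required 'name' is unset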
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceTypeInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceTypeInfoProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        name_ = "";
        units_ = "";
        type_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.units_ = units_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.type_ = type_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto.getDefaultInstance()) return this;
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasUnits()) {
          units_ = other.units_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasName()) {
          return false;
        }
        return true;
      }

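      // The case labels in mergeFrom() below are protobuf wire tags,
      // (field_number << 3) | wire_type: 10 = field 1 length-delimited
      // (name), 18 = field 2 length-delimited (units), 24 = field 3 varint
      // (type), and 0 means end of input. Unrecognized enum numbers for
      // field 3 are preserved as unknown varint fields.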
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                units_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(3, tmpRaw);
                } else {
                  type_ = tmpRaw;
                  bitField0_ |= 0x00000004;
                }
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

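      // name_ and units_ each hold either a String or a ByteString: parsing
      // stores the raw ByteString, and the getters convert to a String on
      // first access, caching the converted value only when the bytes are
      // valid UTF-8.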
      private java.lang.Object name_ = "";
      /**
       * <code>required string name = 1;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string name = 1;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object units_ = "";
      /**
       * <code>optional string units = 2;</code>
       * @return Whether the units field is set.
       */
      public boolean hasUnits() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string units = 2;</code>
       * @return The units.
       */
      public java.lang.String getUnits() {
        java.lang.Object ref = units_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            units_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string units = 2;</code>
       * @return The bytes for units.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUnitsBytes() {
        java.lang.Object ref = units_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          units_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string units = 2;</code>
       * @param value The units to set.
       * @return This builder for chaining.
       */
      public Builder setUnits(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        units_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string units = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearUnits() {
        units_ = getDefaultInstance().getUnits();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string units = 2;</code>
       * @param value The bytes for units to set.
       * @return This builder for chaining.
       */
      public Builder setUnitsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        units_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private int type_ = 0;
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
       * @return Whether the type field is set.
       */
      @java.lang.Override public boolean hasType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
       * @return The type.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto getType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto result = org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.forNumber(type_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto.COUNTABLE : result;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
       * @param value The type to set.
       * @return This builder for chaining.
       */
      public Builder setType(org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypesProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        type_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceTypesProto type = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        type_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceTypeInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceTypeInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

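    // PARSER delegates to Builder.mergeFrom() and attaches the partially
    // built message to any parse failure via setUnfinishedMessage(), so
    // callers can inspect whatever was decoded before the error. The field
    // itself is deprecated in favor of the parser() accessor below.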
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceTypeInfoProto>() {
      @java.lang.Override
      public ResourceTypeInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceTypeInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceTypeInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

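  // ResourceProtoOrBuilder is implemented by both the immutable
  // ResourceProto message and its Builder, so callers can read memory,
  // virtual_cores, and the repeated resource_value_map without caring
  // which form they hold.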
  public interface ResourceProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int64 memory = 1;</code>
     * @return Whether the memory field is set.
     */
    boolean hasMemory();
    /**
     * <code>optional int64 memory = 1;</code>
     * @return The memory.
     */
    long getMemory();

    /**
     * <code>optional int32 virtual_cores = 2;</code>
     * @return Whether the virtualCores field is set.
     */
    boolean hasVirtualCores();
    /**
     * <code>optional int32 virtual_cores = 2;</code>
     * @return The virtualCores.
     */
    int getVirtualCores();

    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> 
        getResourceValueMapList();
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    int getResourceValueMapCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> 
        getResourceValueMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProto}
   */
  public static final class ResourceProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProto)
      ResourceProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceProto.newBuilder() to construct.
    private ResourceProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProto() {
      resourceValueMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder.class);
    }

    private int bitField0_;
    public static final int MEMORY_FIELD_NUMBER = 1;
    private long memory_ = 0L;
    /**
     * <code>optional int64 memory = 1;</code>
     * @return Whether the memory field is set.
     */
    @java.lang.Override
    public boolean hasMemory() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 memory = 1;</code>
     * @return The memory.
     */
    @java.lang.Override
    public long getMemory() {
      return memory_;
    }

    public static final int VIRTUAL_CORES_FIELD_NUMBER = 2;
    private int virtualCores_ = 0;
    /**
     * <code>optional int32 virtual_cores = 2;</code>
     * @return Whether the virtualCores field is set.
     */
    @java.lang.Override
    public boolean hasVirtualCores() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 virtual_cores = 2;</code>
     * @return The virtualCores.
     */
    @java.lang.Override
    public int getVirtualCores() {
      return virtualCores_;
    }

    public static final int RESOURCE_VALUE_MAP_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> resourceValueMap_;
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> getResourceValueMapList() {
      return resourceValueMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> 
        getResourceValueMapOrBuilderList() {
      return resourceValueMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    @java.lang.Override
    public int getResourceValueMapCount() {
      return resourceValueMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index) {
      return resourceValueMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder(
        int index) {
      return resourceValueMap_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getResourceValueMapCount(); i++) {
        if (!getResourceValueMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(1, memory_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, virtualCores_);
      }
      for (int i = 0; i < resourceValueMap_.size(); i++) {
        output.writeMessage(3, resourceValueMap_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(1, memory_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, virtualCores_);
      }
      for (int i = 0; i < resourceValueMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, resourceValueMap_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto) obj;

      if (hasMemory() != other.hasMemory()) return false;
      if (hasMemory()) {
        if (getMemory()
            != other.getMemory()) return false;
      }
      if (hasVirtualCores() != other.hasVirtualCores()) return false;
      if (hasVirtualCores()) {
        if (getVirtualCores()
            != other.getVirtualCores()) return false;
      }
      if (!getResourceValueMapList()
          .equals(other.getResourceValueMapList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasMemory()) {
        hash = (37 * hash) + MEMORY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getMemory());
      }
      if (hasVirtualCores()) {
        hash = (37 * hash) + VIRTUAL_CORES_FIELD_NUMBER;
        hash = (53 * hash) + getVirtualCores();
      }
      if (getResourceValueMapCount() > 0) {
        hash = (37 * hash) + RESOURCE_VALUE_MAP_FIELD_NUMBER;
        hash = (53 * hash) + getResourceValueMapList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

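    // Illustrative builder usage (a sketch; the memory and core counts are
    // assumed example values):
    //
    //   ResourceProto resource = ResourceProto.newBuilder()
    //       .setMemory(4096L)    // optional int64 memory = 1
    //       .setVirtualCores(4)  // optional int32 virtual_cores = 2
    //       .build();
    //
    // Additional named resources would be attached one
    // ResourceInformationProto at a time via addResourceValueMap().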
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        memory_ = 0L;
        virtualCores_ = 0;
        if (resourceValueMapBuilder_ == null) {
          resourceValueMap_ = java.util.Collections.emptyList();
        } else {
          resourceValueMap_ = null;
          resourceValueMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

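      // buildPartialRepeatedFields() hands the builder's list to the message
      // without copying: the list is wrapped as unmodifiable and the
      // mutability bit (0x00000004) is cleared, so a later mutation on this
      // builder forces ensureResourceValueMapIsMutable() to copy first.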
      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result) {
        if (resourceValueMapBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            resourceValueMap_ = java.util.Collections.unmodifiableList(resourceValueMap_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.resourceValueMap_ = resourceValueMap_;
        } else {
          result.resourceValueMap_ = resourceValueMapBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.memory_ = memory_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.virtualCores_ = virtualCores_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

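      // mergeFrom(other) copies scalar fields only when they are set on the
      // other message and appends other's resource_value_map entries rather
      // than replacing the existing list, matching protobuf merge semantics.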
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) return this;
        if (other.hasMemory()) {
          setMemory(other.getMemory());
        }
        if (other.hasVirtualCores()) {
          setVirtualCores(other.getVirtualCores());
        }
        if (resourceValueMapBuilder_ == null) {
          if (!other.resourceValueMap_.isEmpty()) {
            if (resourceValueMap_.isEmpty()) {
              resourceValueMap_ = other.resourceValueMap_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureResourceValueMapIsMutable();
              resourceValueMap_.addAll(other.resourceValueMap_);
            }
            onChanged();
          }
        } else {
          if (!other.resourceValueMap_.isEmpty()) {
            if (resourceValueMapBuilder_.isEmpty()) {
              resourceValueMapBuilder_.dispose();
              resourceValueMapBuilder_ = null;
              resourceValueMap_ = other.resourceValueMap_;
              bitField0_ = (bitField0_ & ~0x00000004);
              resourceValueMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getResourceValueMapFieldBuilder() : null;
            } else {
              resourceValueMapBuilder_.addAllMessages(other.resourceValueMap_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getResourceValueMapCount(); i++) {
          if (!getResourceValueMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                memory_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                virtualCores_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.PARSER,
                        extensionRegistry);
                if (resourceValueMapBuilder_ == null) {
                  ensureResourceValueMapIsMutable();
                  resourceValueMap_.add(m);
                } else {
                  resourceValueMapBuilder_.addMessage(m);
                }
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private long memory_;
      /**
       * <code>optional int64 memory = 1;</code>
       * @return Whether the memory field is set.
       */
      @java.lang.Override
      public boolean hasMemory() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int64 memory = 1;</code>
       * @return The memory.
       */
      @java.lang.Override
      public long getMemory() {
        return memory_;
      }
      /**
       * <code>optional int64 memory = 1;</code>
       * @param value The memory to set.
       * @return This builder for chaining.
       */
      public Builder setMemory(long value) {
        memory_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 memory = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearMemory() {
        bitField0_ = (bitField0_ & ~0x00000001);
        memory_ = 0L;
        onChanged();
        return this;
      }

      private int virtualCores_;
      /**
       * <code>optional int32 virtual_cores = 2;</code>
       * @return Whether the virtualCores field is set.
       */
      @java.lang.Override
      public boolean hasVirtualCores() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 virtual_cores = 2;</code>
       * @return The virtualCores.
       */
      @java.lang.Override
      public int getVirtualCores() {
        return virtualCores_;
      }
      /**
       * <code>optional int32 virtual_cores = 2;</code>
       * @param value The virtualCores to set.
       * @return This builder for chaining.
       */
      public Builder setVirtualCores(int value) {
        virtualCores_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 virtual_cores = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearVirtualCores() {
        bitField0_ = (bitField0_ & ~0x00000002);
        virtualCores_ = 0;
        onChanged();
        return this;
      }

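      // The repeated field keeps two representations: a plain List while no
      // nested builders exist, and a RepeatedFieldBuilderV3 once
      // getResourceValueMapFieldBuilder() has been called. Every accessor
      // below branches on which representation is active.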
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> resourceValueMap_ =
        java.util.Collections.emptyList();
      private void ensureResourceValueMapIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          resourceValueMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto>(resourceValueMap_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> resourceValueMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> getResourceValueMapList() {
        if (resourceValueMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resourceValueMap_);
        } else {
          return resourceValueMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public int getResourceValueMapCount() {
        if (resourceValueMapBuilder_ == null) {
          return resourceValueMap_.size();
        } else {
          return resourceValueMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto getResourceValueMap(int index) {
        if (resourceValueMapBuilder_ == null) {
          return resourceValueMap_.get(index);
        } else {
          return resourceValueMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder setResourceValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) {
        if (resourceValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceValueMapIsMutable();
          resourceValueMap_.set(index, value);
          onChanged();
        } else {
          resourceValueMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder setResourceValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) {
        if (resourceValueMapBuilder_ == null) {
          ensureResourceValueMapIsMutable();
          resourceValueMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceValueMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder addResourceValueMap(org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) {
        if (resourceValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceValueMapIsMutable();
          resourceValueMap_.add(value);
          onChanged();
        } else {
          resourceValueMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder addResourceValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto value) {
        if (resourceValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceValueMapIsMutable();
          resourceValueMap_.add(index, value);
          onChanged();
        } else {
          resourceValueMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder addResourceValueMap(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) {
        if (resourceValueMapBuilder_ == null) {
          ensureResourceValueMapIsMutable();
          resourceValueMap_.add(builderForValue.build());
          onChanged();
        } else {
          resourceValueMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder addResourceValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder builderForValue) {
        if (resourceValueMapBuilder_ == null) {
          ensureResourceValueMapIsMutable();
          resourceValueMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceValueMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder addAllResourceValueMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto> values) {
        if (resourceValueMapBuilder_ == null) {
          ensureResourceValueMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resourceValueMap_);
          onChanged();
        } else {
          resourceValueMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder clearResourceValueMap() {
        if (resourceValueMapBuilder_ == null) {
          resourceValueMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          resourceValueMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public Builder removeResourceValueMap(int index) {
        if (resourceValueMapBuilder_ == null) {
          ensureResourceValueMapIsMutable();
          resourceValueMap_.remove(index);
          onChanged();
        } else {
          resourceValueMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder getResourceValueMapBuilder(
          int index) {
        return getResourceValueMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder getResourceValueMapOrBuilder(
          int index) {
        if (resourceValueMapBuilder_ == null) {
          return resourceValueMap_.get(index);
        } else {
          return resourceValueMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> 
           getResourceValueMapOrBuilderList() {
        if (resourceValueMapBuilder_ != null) {
          return resourceValueMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resourceValueMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder addResourceValueMapBuilder() {
        return getResourceValueMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder addResourceValueMapBuilder(
          int index) {
        return getResourceValueMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceInformationProto resource_value_map = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder> 
           getResourceValueMapBuilderList() {
        return getResourceValueMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder> 
          getResourceValueMapFieldBuilder() {
        if (resourceValueMapBuilder_ == null) {
          resourceValueMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceInformationProtoOrBuilder>(
                  resourceValueMap_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          resourceValueMap_ = null;
        }
        return resourceValueMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProto>() {
      @java.lang.Override
      public ResourceProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
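
  // Illustrative usage (not generated): a minimal sketch of populating the
  // repeated resource_value_map field through the builder API above. Only
  // calls visible in this file are used; the ResourceInformationProto
  // sub-builder's own setters are assumed.
  //
  //   ResourceProto.Builder rb = ResourceProto.newBuilder();
  //   ResourceInformationProto.Builder info = rb.addResourceValueMapBuilder();
  //   // ... populate `info` via its generated setters ...
  //   ResourceProto resource = rb.build();  // sub-builder output is included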

  public interface ResourceUtilizationProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceUtilizationProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 pmem = 1;</code>
     * @return Whether the pmem field is set.
     */
    boolean hasPmem();
    /**
     * <code>optional int32 pmem = 1;</code>
     * @return The pmem.
     */
    int getPmem();

    /**
     * <code>optional int32 vmem = 2;</code>
     * @return Whether the vmem field is set.
     */
    boolean hasVmem();
    /**
     * <code>optional int32 vmem = 2;</code>
     * @return The vmem.
     */
    int getVmem();

    /**
     * <code>optional float cpu = 3;</code>
     * @return Whether the cpu field is set.
     */
    boolean hasCpu();
    /**
     * <code>optional float cpu = 3;</code>
     * @return The cpu.
     */
    float getCpu();

    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> 
        getCustomResourcesList();
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index);
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    int getCustomResourcesCount();
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> 
        getCustomResourcesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder(
        int index);
  }
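
  // Note (editorial): the *OrBuilder accessors above let read-side code work
  // against either built messages or live sub-builders without forcing copies.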
  /**
   * Protobuf type {@code hadoop.yarn.ResourceUtilizationProto}
   */
  public static final class ResourceUtilizationProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceUtilizationProto)
      ResourceUtilizationProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceUtilizationProto.newBuilder() to construct.
    private ResourceUtilizationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceUtilizationProto() {
      customResources_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceUtilizationProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder.class);
    }

    private int bitField0_;
    public static final int PMEM_FIELD_NUMBER = 1;
    private int pmem_ = 0;
    /**
     * <code>optional int32 pmem = 1;</code>
     * @return Whether the pmem field is set.
     */
    @java.lang.Override
    public boolean hasPmem() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 pmem = 1;</code>
     * @return The pmem.
     */
    @java.lang.Override
    public int getPmem() {
      return pmem_;
    }

    public static final int VMEM_FIELD_NUMBER = 2;
    private int vmem_ = 0;
    /**
     * <code>optional int32 vmem = 2;</code>
     * @return Whether the vmem field is set.
     */
    @java.lang.Override
    public boolean hasVmem() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 vmem = 2;</code>
     * @return The vmem.
     */
    @java.lang.Override
    public int getVmem() {
      return vmem_;
    }

    public static final int CPU_FIELD_NUMBER = 3;
    private float cpu_ = 0F;
    /**
     * <code>optional float cpu = 3;</code>
     * @return Whether the cpu field is set.
     */
    @java.lang.Override
    public boolean hasCpu() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional float cpu = 3;</code>
     * @return The cpu.
     */
    @java.lang.Override
    public float getCpu() {
      return cpu_;
    }

    public static final int CUSTOMRESOURCES_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> customResources_;
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> getCustomResourcesList() {
      return customResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> 
        getCustomResourcesOrBuilderList() {
      return customResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    @java.lang.Override
    public int getCustomResourcesCount() {
      return customResources_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index) {
      return customResources_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder(
        int index) {
      return customResources_.get(index);
    }

    private byte memoizedIsInitialized = -1;
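    // -1: not yet computed; 0: known uninitialized; 1: known initialized.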
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getCustomResourcesCount(); i++) {
        if (!getCustomResources(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, pmem_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, vmem_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeFloat(3, cpu_);
      }
      for (int i = 0; i < customResources_.size(); i++) {
        output.writeMessage(4, customResources_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, pmem_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, vmem_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(3, cpu_);
      }
      for (int i = 0; i < customResources_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, customResources_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto) obj;

      if (hasPmem() != other.hasPmem()) return false;
      if (hasPmem()) {
        if (getPmem()
            != other.getPmem()) return false;
      }
      if (hasVmem() != other.hasVmem()) return false;
      if (hasVmem()) {
        if (getVmem()
            != other.getVmem()) return false;
      }
      if (hasCpu() != other.hasCpu()) return false;
      if (hasCpu()) {
        if (java.lang.Float.floatToIntBits(getCpu())
            != java.lang.Float.floatToIntBits(
                other.getCpu())) return false;
      }
      if (!getCustomResourcesList()
          .equals(other.getCustomResourcesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
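      // memoizedHashCode == 0 doubles as the "not yet computed" sentinel, so a
      // hash that genuinely evaluates to 0 is simply recomputed on each call.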
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPmem()) {
        hash = (37 * hash) + PMEM_FIELD_NUMBER;
        hash = (53 * hash) + getPmem();
      }
      if (hasVmem()) {
        hash = (37 * hash) + VMEM_FIELD_NUMBER;
        hash = (53 * hash) + getVmem();
      }
      if (hasCpu()) {
        hash = (37 * hash) + CPU_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getCpu());
      }
      if (getCustomResourcesCount() > 0) {
        hash = (37 * hash) + CUSTOMRESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getCustomResourcesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceUtilizationProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceUtilizationProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        pmem_ = 0;
        vmem_ = 0;
        cpu_ = 0F;
        if (customResourcesBuilder_ == null) {
          customResources_ = java.util.Collections.emptyList();
        } else {
          customResources_ = null;
          customResourcesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result) {
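        // With no sub-builder, freeze the builder's list into an unmodifiable
        // view shared with the built message; otherwise delegate to the
        // repeated-field builder, which produces its own immutable snapshot.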
        if (customResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            customResources_ = java.util.Collections.unmodifiableList(customResources_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.customResources_ = customResources_;
        } else {
          result.customResources_ = customResourcesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.pmem_ = pmem_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.vmem_ = vmem_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.cpu_ = cpu_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) return this;
        if (other.hasPmem()) {
          setPmem(other.getPmem());
        }
        if (other.hasVmem()) {
          setVmem(other.getVmem());
        }
        if (other.hasCpu()) {
          setCpu(other.getCpu());
        }
        if (customResourcesBuilder_ == null) {
          if (!other.customResources_.isEmpty()) {
            if (customResources_.isEmpty()) {
              customResources_ = other.customResources_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureCustomResourcesIsMutable();
              customResources_.addAll(other.customResources_);
            }
            onChanged();
          }
        } else {
          if (!other.customResources_.isEmpty()) {
            if (customResourcesBuilder_.isEmpty()) {
              customResourcesBuilder_.dispose();
              customResourcesBuilder_ = null;
              customResources_ = other.customResources_;
              bitField0_ = (bitField0_ & ~0x00000008);
              customResourcesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getCustomResourcesFieldBuilder() : null;
            } else {
              customResourcesBuilder_.addAllMessages(other.customResources_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getCustomResourcesCount(); i++) {
          if (!getCustomResources(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
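            // tag = (field_number << 3) | wire_type: 8 = pmem (field 1,
            // varint), 16 = vmem (field 2, varint), 29 = cpu (field 3,
            // fixed32), 34 = customResources (field 4, length-delimited);
            // 0 marks end of input.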
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                pmem_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                vmem_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 29: {
                cpu_ = input.readFloat();
                bitField0_ |= 0x00000004;
                break;
              } // case 29
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.PARSER,
                        extensionRegistry);
                if (customResourcesBuilder_ == null) {
                  ensureCustomResourcesIsMutable();
                  customResources_.add(m);
                } else {
                  customResourcesBuilder_.addMessage(m);
                }
                break;
              } // case 34
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int pmem_ ;
      /**
       * <code>optional int32 pmem = 1;</code>
       * @return Whether the pmem field is set.
       */
      @java.lang.Override
      public boolean hasPmem() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 pmem = 1;</code>
       * @return The pmem.
       */
      @java.lang.Override
      public int getPmem() {
        return pmem_;
      }
      /**
       * <code>optional int32 pmem = 1;</code>
       * @param value The pmem to set.
       * @return This builder for chaining.
       */
      public Builder setPmem(int value) {
        pmem_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 pmem = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearPmem() {
        bitField0_ = (bitField0_ & ~0x00000001);
        pmem_ = 0;
        onChanged();
        return this;
      }

      private int vmem_ ;
      /**
       * <code>optional int32 vmem = 2;</code>
       * @return Whether the vmem field is set.
       */
      @java.lang.Override
      public boolean hasVmem() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 vmem = 2;</code>
       * @return The vmem.
       */
      @java.lang.Override
      public int getVmem() {
        return vmem_;
      }
      /**
       * <code>optional int32 vmem = 2;</code>
       * @param value The vmem to set.
       * @return This builder for chaining.
       */
      public Builder setVmem(int value) {
        vmem_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 vmem = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearVmem() {
        bitField0_ = (bitField0_ & ~0x00000002);
        vmem_ = 0;
        onChanged();
        return this;
      }

      private float cpu_ ;
      /**
       * <code>optional float cpu = 3;</code>
       * @return Whether the cpu field is set.
       */
      @java.lang.Override
      public boolean hasCpu() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional float cpu = 3;</code>
       * @return The cpu.
       */
      @java.lang.Override
      public float getCpu() {
        return cpu_;
      }
      /**
       * <code>optional float cpu = 3;</code>
       * @param value The cpu to set.
       * @return This builder for chaining.
       */
      public Builder setCpu(float value) {
        cpu_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional float cpu = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearCpu() {
        bitField0_ = (bitField0_ & ~0x00000004);
        cpu_ = 0F;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> customResources_ =
        java.util.Collections.emptyList();
      private void ensureCustomResourcesIsMutable() {
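        // Copy-on-write: after a merge the list may still alias another
        // message's immutable list, so copy it before the first mutation.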
        if (!((bitField0_ & 0x00000008) != 0)) {
          customResources_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto>(customResources_);
          bitField0_ |= 0x00000008;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> customResourcesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> getCustomResourcesList() {
        if (customResourcesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(customResources_);
        } else {
          return customResourcesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public int getCustomResourcesCount() {
        if (customResourcesBuilder_ == null) {
          return customResources_.size();
        } else {
          return customResourcesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getCustomResources(int index) {
        if (customResourcesBuilder_ == null) {
          return customResources_.get(index);
        } else {
          return customResourcesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder setCustomResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) {
        if (customResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCustomResourcesIsMutable();
          customResources_.set(index, value);
          onChanged();
        } else {
          customResourcesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder setCustomResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) {
        if (customResourcesBuilder_ == null) {
          ensureCustomResourcesIsMutable();
          customResources_.set(index, builderForValue.build());
          onChanged();
        } else {
          customResourcesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder addCustomResources(org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) {
        if (customResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCustomResourcesIsMutable();
          customResources_.add(value);
          onChanged();
        } else {
          customResourcesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder addCustomResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto value) {
        if (customResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureCustomResourcesIsMutable();
          customResources_.add(index, value);
          onChanged();
        } else {
          customResourcesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder addCustomResources(
          org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) {
        if (customResourcesBuilder_ == null) {
          ensureCustomResourcesIsMutable();
          customResources_.add(builderForValue.build());
          onChanged();
        } else {
          customResourcesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder addCustomResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder builderForValue) {
        if (customResourcesBuilder_ == null) {
          ensureCustomResourcesIsMutable();
          customResources_.add(index, builderForValue.build());
          onChanged();
        } else {
          customResourcesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder addAllCustomResources(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto> values) {
        if (customResourcesBuilder_ == null) {
          ensureCustomResourcesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, customResources_);
          onChanged();
        } else {
          customResourcesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder clearCustomResources() {
        if (customResourcesBuilder_ == null) {
          customResources_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          customResourcesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public Builder removeCustomResources(int index) {
        if (customResourcesBuilder_ == null) {
          ensureCustomResourcesIsMutable();
          customResources_.remove(index);
          onChanged();
        } else {
          customResourcesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder getCustomResourcesBuilder(
          int index) {
        return getCustomResourcesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder getCustomResourcesOrBuilder(
          int index) {
        if (customResourcesBuilder_ == null) {
          return customResources_.get(index);
        } else {
          return customResourcesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> 
           getCustomResourcesOrBuilderList() {
        if (customResourcesBuilder_ != null) {
          return customResourcesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(customResources_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder addCustomResourcesBuilder() {
        return getCustomResourcesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder addCustomResourcesBuilder(
          int index) {
        return getCustomResourcesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringFloatMapProto customResources = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder> 
           getCustomResourcesBuilderList() {
        return getCustomResourcesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder> 
          getCustomResourcesFieldBuilder() {
        if (customResourcesBuilder_ == null) {
          customResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder>(
                  customResources_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          customResources_ = null;
        }
        return customResourcesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceUtilizationProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceUtilizationProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceUtilizationProto>() {
      @java.lang.Override
      public ResourceUtilizationProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceUtilizationProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
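
  // Illustrative usage (not generated): a round-trip sketch using only the
  // generated API above; the field values are arbitrary examples.
  //
  //   ResourceUtilizationProto util = ResourceUtilizationProto.newBuilder()
  //       .setPmem(2048)
  //       .setVmem(4096)
  //       .setCpu(1.5f)
  //       .build();
  //   byte[] wire = util.toByteArray();
  //   ResourceUtilizationProto parsed = ResourceUtilizationProto.parseFrom(wire);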

  public interface ResourceOptionProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceOptionProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional int32 over_commit_timeout = 2;</code>
     * @return Whether the overCommitTimeout field is set.
     */
    boolean hasOverCommitTimeout();
    /**
     * <code>optional int32 over_commit_timeout = 2;</code>
     * @return The overCommitTimeout.
     */
    int getOverCommitTimeout();
  }
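
  // Reader-side sketch (editorial): getResource() returns the type's default
  // instance when the field is unset, so presence should be checked first.
  //
  //   ResourceOptionProto opt = ...;  // obtained elsewhere (hypothetical)
  //   if (opt.hasResource()) {
  //     ResourceProto res = opt.getResource();
  //   }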
  /**
   * Protobuf type {@code hadoop.yarn.ResourceOptionProto}
   */
  public static final class ResourceOptionProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceOptionProto)
      ResourceOptionProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceOptionProto.newBuilder() to construct.
    private ResourceOptionProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceOptionProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceOptionProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }

    public static final int OVER_COMMIT_TIMEOUT_FIELD_NUMBER = 2;
    private int overCommitTimeout_ = 0;
    /**
     * <code>optional int32 over_commit_timeout = 2;</code>
     * @return Whether the overCommitTimeout field is set.
     */
    @java.lang.Override
    public boolean hasOverCommitTimeout() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 over_commit_timeout = 2;</code>
     * @return The overCommitTimeout.
     */
    @java.lang.Override
    public int getOverCommitTimeout() {
      return overCommitTimeout_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, overCommitTimeout_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, overCommitTimeout_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto) obj;

      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasOverCommitTimeout() != other.hasOverCommitTimeout()) return false;
      if (hasOverCommitTimeout()) {
        if (getOverCommitTimeout()
            != other.getOverCommitTimeout()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasOverCommitTimeout()) {
        hash = (37 * hash) + OVER_COMMIT_TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + getOverCommitTimeout();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceOptionProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceOptionProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        overCommitTimeout_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.overCommitTimeout_ = overCommitTimeout_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance()) return this;
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasOverCommitTimeout()) {
          setOverCommitTimeout(other.getOverCommitTimeout());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
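            // The tag packs (field_number << 3) | wire_type, so case 10 is
            // field 1 (resource, wire type 2 = length-delimited) and case 16
            // is field 2 (over_commit_timeout, wire type 0 = varint).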
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                overCommitTimeout_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
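      // Exactly one of resource_ (a plain message) or resourceBuilder_ (a
      // lazily created SingleFieldBuilderV3) holds the live value; the
      // accessors below consult the builder first whenever it exists.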
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private int overCommitTimeout_ ;
      /**
       * <code>optional int32 over_commit_timeout = 2;</code>
       * @return Whether the overCommitTimeout field is set.
       */
      @java.lang.Override
      public boolean hasOverCommitTimeout() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 over_commit_timeout = 2;</code>
       * @return The overCommitTimeout.
       */
      @java.lang.Override
      public int getOverCommitTimeout() {
        return overCommitTimeout_;
      }
      /**
       * <code>optional int32 over_commit_timeout = 2;</code>
       * @param value The overCommitTimeout to set.
       * @return This builder for chaining.
       */
      public Builder setOverCommitTimeout(int value) {
        overCommitTimeout_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 over_commit_timeout = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearOverCommitTimeout() {
        bitField0_ = (bitField0_ & ~0x00000002);
        overCommitTimeout_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceOptionProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceOptionProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceOptionProto>() {
      @java.lang.Override
      public ResourceOptionProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
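    // Every failure path in parsePartialFrom() attaches buildPartial() as the
    // unfinished message, so callers can recover whatever fields were decoded
    // before the error from the thrown InvalidProtocolBufferException.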

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceOptionProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
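  // Usage sketch (illustrative only; the timeout value is made up): a
  // ResourceOptionProto round-trips through its generated builder, its
  // serialized byte[] form, and parseFrom(byte[]).
  //
  //   ResourceOptionProto option = ResourceOptionProto.newBuilder()
  //       .setResource(ResourceProto.getDefaultInstance())
  //       .setOverCommitTimeout(30)
  //       .build();
  //   byte[] bytes = option.toByteArray();
  //   ResourceOptionProto copy = ResourceOptionProto.parseFrom(bytes);
  //   assert copy.getOverCommitTimeout() == 30;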

  public interface ResourceProfileEntryOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProfileEntry)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return Whether the resources field is set.
     */
    boolean hasResources();
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return The resources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources();
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProfileEntry}
   */
  public static final class ResourceProfileEntry extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProfileEntry)
      ResourceProfileEntryOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceProfileEntry.newBuilder() to construct.
    private ResourceProfileEntry(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProfileEntry() {
      name_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProfileEntry();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RESOURCES_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return Whether the resources field is set.
     */
    @java.lang.Override
    public boolean hasResources() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return The resources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }
    /**
     * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasResources()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getResources().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getResources());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getResources());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry) obj;

      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasResources() != other.hasResources()) return false;
      if (hasResources()) {
        if (!getResources()
            .equals(other.getResources())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasResources()) {
        hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getResources().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceProfileEntry}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProfileEntry)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourcesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        name_ = "";
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resources_ = resourcesBuilder_ == null
              ? resources_
              : resourcesBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance()) return this;
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasResources()) {
          mergeResources(other.getResources());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasName()) {
          return false;
        }
        if (!hasResources()) {
          return false;
        }
        if (!getResources().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                name_ = input.readBytes();
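                // readBytes() stores the raw ByteString without validating
                // UTF-8; getName() decodes lazily and only caches the decoded
                // String when the bytes are valid UTF-8.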
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object name_ = "";
      /**
       * <code>required string name = 1;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string name = 1;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_;
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       * @return Whether the resources field is set.
       */
      public boolean hasResources() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       * @return The resources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
        if (resourcesBuilder_ == null) {
          return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        } else {
          return resourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder setResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resources_ = value;
        } else {
          resourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder setResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourcesBuilder_ == null) {
          resources_ = builderForValue.build();
        } else {
          resourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder mergeResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            resources_ != null &&
            resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourcesBuilder().mergeFrom(value);
          } else {
            resources_ = value;
          }
        } else {
          resourcesBuilder_.mergeFrom(value);
        }
        if (resources_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder clearResources() {
        bitField0_ = (bitField0_ & ~0x00000002);
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
        if (resourcesBuilder_ != null) {
          return resourcesBuilder_.getMessageOrBuilder();
        } else {
          return resources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        }
      }
      /**
       * <code>required .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourcesFieldBuilder() {
        if (resourcesBuilder_ == null) {
          resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResources(),
                  getParentForChildren(),
                  isClean());
          resources_ = null;
        }
        return resourcesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProfileEntry)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProfileEntry)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProfileEntry>() {
      @java.lang.Override
      public ResourceProfileEntry parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfileEntry> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
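  // Usage sketch (illustrative; the profile name "minimum" is made up): both
  // fields of ResourceProfileEntry are declared required, so build() throws
  // the unchecked exception from newUninitializedMessageException(...) if
  // either is missing, while buildPartial() skips that check.
  //
  //   ResourceProfileEntry entry = ResourceProfileEntry.newBuilder()
  //       .setName("minimum")
  //       .setResources(ResourceProto.getDefaultInstance())
  //       .build();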

  public interface ResourceProfilesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceProfilesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> 
        getResourceProfilesMapList();
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    int getResourceProfilesMapCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> 
        getResourceProfilesMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceProfilesProto}
   */
  public static final class ResourceProfilesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceProfilesProto)
      ResourceProfilesProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceProfilesProto.newBuilder() to construct.
    private ResourceProfilesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceProfilesProto() {
      resourceProfilesMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceProfilesProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder.class);
    }

    public static final int RESOURCE_PROFILES_MAP_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> resourceProfilesMap_;
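    // resource_profiles_map is a repeated list of name/resources entries
    // rather than a proto map field; judging by the field name, callers
    // presumably treat it as a profile-name -> ResourceProto mapping.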
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> getResourceProfilesMapList() {
      return resourceProfilesMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> 
        getResourceProfilesMapOrBuilderList() {
      return resourceProfilesMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    @java.lang.Override
    public int getResourceProfilesMapCount() {
      return resourceProfilesMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index) {
      return resourceProfilesMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
        int index) {
      return resourceProfilesMap_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getResourceProfilesMapCount(); i++) {
        if (!getResourceProfilesMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < resourceProfilesMap_.size(); i++) {
        output.writeMessage(1, resourceProfilesMap_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < resourceProfilesMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, resourceProfilesMap_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto) obj;

      if (!getResourceProfilesMapList()
          .equals(other.getResourceProfilesMapList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getResourceProfilesMapCount() > 0) {
        hash = (37 * hash) + RESOURCE_PROFILES_MAP_FIELD_NUMBER;
        hash = (53 * hash) + getResourceProfilesMapList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceProfilesProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceProfilesProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMap_ = java.util.Collections.emptyList();
        } else {
          resourceProfilesMap_ = null;
          resourceProfilesMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result) {
        if (resourceProfilesMapBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            resourceProfilesMap_ = java.util.Collections.unmodifiableList(resourceProfilesMap_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.resourceProfilesMap_ = resourceProfilesMap_;
        } else {
          result.resourceProfilesMap_ = resourceProfilesMapBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto result) {
        // Intentionally a no-op: the only field, the repeated
        // resource_profiles_map, is populated in buildPartialRepeatedFields(result).
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto.getDefaultInstance()) return this;
        if (resourceProfilesMapBuilder_ == null) {
          if (!other.resourceProfilesMap_.isEmpty()) {
            if (resourceProfilesMap_.isEmpty()) {
              resourceProfilesMap_ = other.resourceProfilesMap_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureResourceProfilesMapIsMutable();
              resourceProfilesMap_.addAll(other.resourceProfilesMap_);
            }
            onChanged();
          }
        } else {
          if (!other.resourceProfilesMap_.isEmpty()) {
            if (resourceProfilesMapBuilder_.isEmpty()) {
              resourceProfilesMapBuilder_.dispose();
              resourceProfilesMapBuilder_ = null;
              resourceProfilesMap_ = other.resourceProfilesMap_;
              bitField0_ = (bitField0_ & ~0x00000001);
              resourceProfilesMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getResourceProfilesMapFieldBuilder() : null;
            } else {
              resourceProfilesMapBuilder_.addAllMessages(other.resourceProfilesMap_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getResourceProfilesMapCount(); i++) {
          if (!getResourceProfilesMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              // tag 10 = (field number 1 << 3) | wire type 2 (length-delimited)
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.PARSER,
                        extensionRegistry);
                if (resourceProfilesMapBuilder_ == null) {
                  ensureResourceProfilesMapIsMutable();
                  resourceProfilesMap_.add(m);
                } else {
                  resourceProfilesMapBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> resourceProfilesMap_ =
        java.util.Collections.emptyList();
      private void ensureResourceProfilesMapIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          resourceProfilesMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry>(resourceProfilesMap_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> resourceProfilesMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> getResourceProfilesMapList() {
        if (resourceProfilesMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resourceProfilesMap_);
        } else {
          return resourceProfilesMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public int getResourceProfilesMapCount() {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.size();
        } else {
          return resourceProfilesMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry getResourceProfilesMap(int index) {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.get(index);
        } else {
          return resourceProfilesMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder setResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.set(index, value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder setResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder addResourceProfilesMap(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder addResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry value) {
        if (resourceProfilesMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(index, value);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder addResourceProfilesMap(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder addResourceProfilesMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder builderForValue) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder addAllResourceProfilesMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry> values) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resourceProfilesMap_);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder clearResourceProfilesMap() {
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public Builder removeResourceProfilesMap(int index) {
        if (resourceProfilesMapBuilder_ == null) {
          ensureResourceProfilesMapIsMutable();
          resourceProfilesMap_.remove(index);
          onChanged();
        } else {
          resourceProfilesMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder getResourceProfilesMapBuilder(
          int index) {
        return getResourceProfilesMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder getResourceProfilesMapOrBuilder(
          int index) {
        if (resourceProfilesMapBuilder_ == null) {
          return resourceProfilesMap_.get(index);
        } else {
          return resourceProfilesMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> 
           getResourceProfilesMapOrBuilderList() {
        if (resourceProfilesMapBuilder_ != null) {
          return resourceProfilesMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resourceProfilesMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder addResourceProfilesMapBuilder() {
        return getResourceProfilesMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder addResourceProfilesMapBuilder(
          int index) {
        return getResourceProfilesMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceProfileEntry resource_profiles_map = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder> 
           getResourceProfilesMapBuilderList() {
        return getResourceProfilesMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder> 
          getResourceProfilesMapFieldBuilder() {
        if (resourceProfilesMapBuilder_ == null) {
          resourceProfilesMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntry.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfileEntryOrBuilder>(
                  resourceProfilesMap_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          resourceProfilesMap_ = null;
        }
        return resourceProfilesMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceProfilesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceProfilesProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceProfilesProto>() {
      @java.lang.Override
      public ResourceProfilesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceProfilesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProfilesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
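
  // Editor's note — illustrative sketch only, not part of the protoc output.
  // A minimal round trip for ResourceProfilesProto using just the generated
  // API shown above (newBuilder / addResourceProfilesMap / build / parseFrom);
  // the entry value and variable names are hypothetical placeholders:
  //
  //   YarnProtos.ResourceProfilesProto profiles =
  //       YarnProtos.ResourceProfilesProto.newBuilder()
  //           .addResourceProfilesMap(
  //               YarnProtos.ResourceProfileEntry.getDefaultInstance())
  //           .build();
  //   byte[] wire = profiles.toByteArray();
  //   YarnProtos.ResourceProfilesProto parsed =
  //       YarnProtos.ResourceProfilesProto.parseFrom(wire);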

  public interface NodeResourceMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeResourceMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     * @return Whether the resourceOption field is set.
     */
    boolean hasResourceOption();
    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     * @return The resourceOption.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption();
    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeResourceMapProto}
   */
  public static final class NodeResourceMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeResourceMapProto)
      NodeResourceMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeResourceMapProto.newBuilder() to construct.
    private NodeResourceMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeResourceMapProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeResourceMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder.class);
    }

    // Bit 0 tracks node_id, bit 1 tracks resource_option (proto2 "has" state).
    private int bitField0_;
    public static final int NODE_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int RESOURCE_OPTION_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto resourceOption_;
    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     * @return Whether the resourceOption field is set.
     */
    @java.lang.Override
    public boolean hasResourceOption() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     * @return The resourceOption.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption() {
      return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder() {
      return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_;
    }

    // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResourceOption()) {
        if (!getResourceOption().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getResourceOption());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getResourceOption());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto) obj;

      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasResourceOption() != other.hasResourceOption()) return false;
      if (hasResourceOption()) {
        if (!getResourceOption()
            .equals(other.getResourceOption())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeId()) {
        hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasResourceOption()) {
        hash = (37 * hash) + RESOURCE_OPTION_FIELD_NUMBER;
        hash = (53 * hash) + getResourceOption().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeResourceMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeResourceMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeIdFieldBuilder();
          getResourceOptionFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        resourceOption_ = null;
        if (resourceOptionBuilder_ != null) {
          resourceOptionBuilder_.dispose();
          resourceOptionBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeResourceMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resourceOption_ = resourceOptionBuilder_ == null
              ? resourceOption_
              : resourceOptionBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto.getDefaultInstance()) return this;
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasResourceOption()) {
          mergeResourceOption(other.getResourceOption());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResourceOption()) {
          if (!getResourceOption().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              // tag 10 = (field number 1 << 3) | wire type 2 (length-delimited)
              case 10: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              // tag 18 = (field number 2 << 3) | wire type 2 (length-delimited)
              case 18: {
                input.readMessage(
                    getResourceOptionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto resourceOption_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder> resourceOptionBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       * @return Whether the resourceOption field is set.
       */
      public boolean hasResourceOption() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       * @return The resourceOption.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto getResourceOption() {
        if (resourceOptionBuilder_ == null) {
          return resourceOption_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_;
        } else {
          return resourceOptionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public Builder setResourceOption(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto value) {
        if (resourceOptionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resourceOption_ = value;
        } else {
          resourceOptionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public Builder setResourceOption(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder builderForValue) {
        if (resourceOptionBuilder_ == null) {
          resourceOption_ = builderForValue.build();
        } else {
          resourceOptionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public Builder mergeResourceOption(org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto value) {
        if (resourceOptionBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            resourceOption_ != null &&
            resourceOption_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance()) {
            getResourceOptionBuilder().mergeFrom(value);
          } else {
            resourceOption_ = value;
          }
        } else {
          resourceOptionBuilder_.mergeFrom(value);
        }
        if (resourceOption_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public Builder clearResourceOption() {
        bitField0_ = (bitField0_ & ~0x00000002);
        resourceOption_ = null;
        if (resourceOptionBuilder_ != null) {
          resourceOptionBuilder_.dispose();
          resourceOptionBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder getResourceOptionBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResourceOptionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder getResourceOptionOrBuilder() {
        if (resourceOptionBuilder_ != null) {
          return resourceOptionBuilder_.getMessageOrBuilder();
        } else {
          return resourceOption_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.getDefaultInstance() : resourceOption_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceOptionProto resource_option = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder> 
          getResourceOptionFieldBuilder() {
        if (resourceOptionBuilder_ == null) {
          resourceOptionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceOptionProtoOrBuilder>(
                  getResourceOption(),
                  getParentForChildren(),
                  isClean());
          resourceOption_ = null;
        }
        return resourceOptionBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeResourceMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeResourceMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeResourceMapProto>() {
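      // On any parse failure the partially decoded message is attached via
      // setUnfinishedMessage so callers can inspect what was read.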
      @java.lang.Override
      public NodeResourceMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeResourceMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeResourceMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
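
  /*
   * Illustrative usage (editorial sketch, not generated code): building a
   * NodeResourceMapProto. This assumes the message's first field is
   * node_id = 1 (a NodeIdProto), as declared in yarn_protos.proto; the host
   * and port values are placeholders.
   *
   *   NodeResourceMapProto mapping = NodeResourceMapProto.newBuilder()
   *       .setNodeId(NodeIdProto.newBuilder().setHost("example-host").setPort(8041))
   *       .setResourceOption(ResourceOptionProto.getDefaultInstance())
   *       .build();
   */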

  public interface PriorityProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PriorityProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 priority = 1;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional int32 priority = 1;</code>
     * @return The priority.
     */
    int getPriority();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PriorityProto}
   */
  public static final class PriorityProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PriorityProto)
      PriorityProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PriorityProto.newBuilder() to construct.
    private PriorityProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PriorityProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PriorityProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder.class);
    }

    private int bitField0_;
    public static final int PRIORITY_FIELD_NUMBER = 1;
    private int priority_ = 0;
    /**
     * <code>optional int32 priority = 1;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 priority = 1;</code>
     * @return The priority.
     */
    @java.lang.Override
    public int getPriority() {
      return priority_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, priority_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, priority_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto) obj;

      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (getPriority()
            != other.getPriority()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PriorityProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PriorityProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        priority_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PriorityProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.priority_ = priority_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) return this;
        if (other.hasPriority()) {
          setPriority(other.getPriority());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
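                // Tag 8 = field 1 (priority), wire type 0 (varint).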
                priority_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int priority_;
      /**
       * <code>optional int32 priority = 1;</code>
       * @return Whether the priority field is set.
       */
      @java.lang.Override
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 priority = 1;</code>
       * @return The priority.
       */
      @java.lang.Override
      public int getPriority() {
        return priority_;
      }
      /**
       * <code>optional int32 priority = 1;</code>
       * @param value The priority to set.
       * @return This builder for chaining.
       */
      public Builder setPriority(int value) {
        priority_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 priority = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000001);
        priority_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PriorityProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PriorityProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PriorityProto>() {
      @java.lang.Override
      public PriorityProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PriorityProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
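
  /*
   * Illustrative round trip (editorial sketch, not generated code):
   * PriorityProto carries one optional int32, so check hasPriority() before
   * relying on getPriority(), which returns 0 when the field is unset.
   *
   *   PriorityProto p = PriorityProto.newBuilder().setPriority(5).build();
   *   PriorityProto parsed = PriorityProto.parseFrom(p.toByteArray());
   *   assert parsed.hasPriority() && parsed.getPriority() == 5;
   */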

  public interface ContainerProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return The id.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>optional string node_http_address = 3;</code>
     * @return Whether the nodeHttpAddress field is set.
     */
    boolean hasNodeHttpAddress();
    /**
     * <code>optional string node_http_address = 3;</code>
     * @return The nodeHttpAddress.
     */
    java.lang.String getNodeHttpAddress();
    /**
     * <code>optional string node_http_address = 3;</code>
     * @return The bytes for nodeHttpAddress.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeHttpAddressBytes();

    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     * @return Whether the containerToken field is set.
     */
    boolean hasContainerToken();
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     * @return The containerToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken();
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();

    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return Whether the allocationRequestId field is set.
     */
    boolean hasAllocationRequestId();
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return The allocationRequestId.
     */
    long getAllocationRequestId();

    /**
     * <code>optional int32 version = 9 [default = 0];</code>
     * @return Whether the version field is set.
     */
    boolean hasVersion();
    /**
     * <code>optional int32 version = 9 [default = 0];</code>
     * @return The version.
     */
    int getVersion();

    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @return A list containing the allocationTags.
     */
    java.util.List<java.lang.String>
        getAllocationTagsList();
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @return The count of allocationTags.
     */
    int getAllocationTagsCount();
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    java.lang.String getAllocationTags(int index);
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index);

    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return Whether the exposedPorts field is set.
     */
    boolean hasExposedPorts();
    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return The exposedPorts.
     */
    java.lang.String getExposedPorts();
    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return The bytes for exposedPorts.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExposedPortsBytes();
  }
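
  /*
   * Reading note (editorial, not generated code): each optional message field
   * above follows the has/get/getOrBuilder triple. The plain getters never
   * return null; when a field is unset they fall back to the type's default
   * instance, so hasX() is the only reliable presence check.
   */
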
  /**
   * Protobuf type {@code hadoop.yarn.ContainerProto}
   */
  public static final class ContainerProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerProto)
      ContainerProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerProto.newBuilder() to construct.
    private ContainerProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerProto() {
      nodeHttpAddress_ = "";
      executionType_ = 1;
      allocationRequestId_ = -1L;
      allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      exposedPorts_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder.class);
    }

    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
    }

    public static final int NODEID_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int NODE_HTTP_ADDRESS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object nodeHttpAddress_ = "";
    /**
     * <code>optional string node_http_address = 3;</code>
     * @return Whether the nodeHttpAddress field is set.
     */
    @java.lang.Override
    public boolean hasNodeHttpAddress() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string node_http_address = 3;</code>
     * @return The nodeHttpAddress.
     */
    @java.lang.Override
    public java.lang.String getNodeHttpAddress() {
      java.lang.Object ref = nodeHttpAddress_;
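      // The field holds either a String or a ByteString; decode once and
      // cache the String only if the bytes were valid UTF-8.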
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeHttpAddress_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string node_http_address = 3;</code>
     * @return The bytes for nodeHttpAddress.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeHttpAddressBytes() {
      java.lang.Object ref = nodeHttpAddress_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeHttpAddress_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RESOURCE_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }

    public static final int PRIORITY_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int CONTAINER_TOKEN_FIELD_NUMBER = 6;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_;
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     * @return Whether the containerToken field is set.
     */
    @java.lang.Override
    public boolean hasContainerToken() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     * @return The containerToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() {
      return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() {
      return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
    }

    public static final int EXECUTION_TYPE_FIELD_NUMBER = 7;
    private int executionType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
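      // Numbers not defined in the enum fall back to the declared default,
      // GUARANTEED.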
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
    }

    public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 8;
    private long allocationRequestId_ = -1L;
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return Whether the allocationRequestId field is set.
     */
    @java.lang.Override
    public boolean hasAllocationRequestId() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return The allocationRequestId.
     */
    @java.lang.Override
    public long getAllocationRequestId() {
      return allocationRequestId_;
    }

    public static final int VERSION_FIELD_NUMBER = 9;
    private int version_ = 0;
    /**
     * <code>optional int32 version = 9 [default = 0];</code>
     * @return Whether the version field is set.
     */
    @java.lang.Override
    public boolean hasVersion() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional int32 version = 9 [default = 0];</code>
     * @return The version.
     */
    @java.lang.Override
    public int getVersion() {
      return version_;
    }

    public static final int ALLOCATION_TAGS_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @return A list containing the allocationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getAllocationTagsList() {
      return allocationTags_;
    }
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @return The count of allocationTags.
     */
    public int getAllocationTagsCount() {
      return allocationTags_.size();
    }
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    public java.lang.String getAllocationTags(int index) {
      return allocationTags_.get(index);
    }
    /**
     * <code>repeated string allocation_tags = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index) {
      return allocationTags_.getByteString(index);
    }

    public static final int EXPOSED_PORTS_FIELD_NUMBER = 11;
    @SuppressWarnings("serial")
    private volatile java.lang.Object exposedPorts_ = "";
    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return Whether the exposedPorts field is set.
     */
    @java.lang.Override
    public boolean hasExposedPorts() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return The exposedPorts.
     */
    @java.lang.Override
    public java.lang.String getExposedPorts() {
      java.lang.Object ref = exposedPorts_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          exposedPorts_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string exposed_ports = 11;</code>
     * @return The bytes for exposedPorts.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getExposedPortsBytes() {
      java.lang.Object ref = exposedPorts_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        exposedPorts_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasContainerToken()) {
        if (!getContainerToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
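      // Optional fields are written only when their presence bit is set;
      // repeated allocation_tags entries are always written.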
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getNodeId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, nodeHttpAddress_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getResource());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(5, getPriority());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(6, getContainerToken());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeEnum(7, executionType_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt64(8, allocationRequestId_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeInt32(9, version_);
      }
      for (int i = 0; i < allocationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, allocationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, exposedPorts_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getNodeId());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, nodeHttpAddress_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getResource());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getPriority());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, getContainerToken());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(7, executionType_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(8, allocationRequestId_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(9, version_);
      }
      {
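        // Each element costs its length-prefixed payload plus one tag byte
        // (field 10 encodes as a single-byte tag).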
        int dataSize = 0;
        for (int i = 0; i < allocationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getAllocationTagsList().size();
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, exposedPorts_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (!getId()
            .equals(other.getId())) return false;
      }
      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasNodeHttpAddress() != other.hasNodeHttpAddress()) return false;
      if (hasNodeHttpAddress()) {
        if (!getNodeHttpAddress()
            .equals(other.getNodeHttpAddress())) return false;
      }
      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasContainerToken() != other.hasContainerToken()) return false;
      if (hasContainerToken()) {
        if (!getContainerToken()
            .equals(other.getContainerToken())) return false;
      }
      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (executionType_ != other.executionType_) return false;
      }
      if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false;
      if (hasAllocationRequestId()) {
        if (getAllocationRequestId()
            != other.getAllocationRequestId()) return false;
      }
      if (hasVersion() != other.hasVersion()) return false;
      if (hasVersion()) {
        if (getVersion()
            != other.getVersion()) return false;
      }
      if (!getAllocationTagsList()
          .equals(other.getAllocationTagsList())) return false;
      if (hasExposedPorts() != other.hasExposedPorts()) return false;
      if (hasExposedPorts()) {
        if (!getExposedPorts()
            .equals(other.getExposedPorts())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      if (hasNodeId()) {
        hash = (37 * hash) + NODEID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasNodeHttpAddress()) {
        hash = (37 * hash) + NODE_HTTP_ADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeHttpAddress().hashCode();
      }
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasContainerToken()) {
        hash = (37 * hash) + CONTAINER_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getContainerToken().hashCode();
      }
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + executionType_;
      }
      if (hasAllocationRequestId()) {
        hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocationRequestId());
      }
      if (hasVersion()) {
        hash = (37 * hash) + VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVersion();
      }
      if (getAllocationTagsCount() > 0) {
        hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER;
        hash = (53 * hash) + getAllocationTagsList().hashCode();
      }
      if (hasExposedPorts()) {
        hash = (37 * hash) + EXPOSED_PORTS_FIELD_NUMBER;
        hash = (53 * hash) + getExposedPorts().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
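        // alwaysUseFieldBuilders is enabled only in protobuf's own tests; it
        // forces eager creation of every nested field builder.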
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getIdFieldBuilder();
          getNodeIdFieldBuilder();
          getResourceFieldBuilder();
          getPriorityFieldBuilder();
          getContainerTokenFieldBuilder();
        }
      }
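
      /*
       * Illustrative sketch (editorial, not generated code): populating a
       * ContainerProto through this builder. All values are placeholders;
       * nested-message setters also accept a Builder directly.
       *
       *   ContainerProto c = ContainerProto.newBuilder()
       *       .setId(ContainerIdProto.getDefaultInstance())
       *       .setNodeHttpAddress("example-host:8042")
       *       .setExecutionType(ExecutionTypeProto.OPPORTUNISTIC)
       *       .addAllocationTags("placeholder-tag")
       *       .build();
       */
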
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        nodeHttpAddress_ = "";
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        containerToken_ = null;
        if (containerTokenBuilder_ != null) {
          containerTokenBuilder_.dispose();
          containerTokenBuilder_ = null;
        }
        executionType_ = 1;
        allocationRequestId_ = -1L;
        version_ = 0;
        allocationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        exposedPorts_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = idBuilder_ == null
              ? id_
              : idBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.nodeHttpAddress_ = nodeHttpAddress_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.containerToken_ = containerTokenBuilder_ == null
              ? containerToken_
              : containerTokenBuilder_.build();
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.executionType_ = executionType_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.allocationRequestId_ = allocationRequestId_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.version_ = version_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          allocationTags_.makeImmutable();
          result.allocationTags_ = allocationTags_;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.exposedPorts_ = exposedPorts_;
          to_bitField0_ |= 0x00000200;
        }
        result.bitField0_ |= to_bitField0_;
      }
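      // Note on the bit mapping above: the builder tracks eleven fields in
      // bitField0_ (0x1 through 0x400), but the built message keeps no has-bit
      // for the repeated allocation_tags field, so exposed_ports maps from
      // builder bit 0x00000400 down to message bit 0x00000200.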

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          mergeId(other.getId());
        }
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasNodeHttpAddress()) {
          nodeHttpAddress_ = other.nodeHttpAddress_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasContainerToken()) {
          mergeContainerToken(other.getContainerToken());
        }
        if (other.hasExecutionType()) {
          setExecutionType(other.getExecutionType());
        }
        if (other.hasAllocationRequestId()) {
          setAllocationRequestId(other.getAllocationRequestId());
        }
        if (other.hasVersion()) {
          setVersion(other.getVersion());
        }
        if (!other.allocationTags_.isEmpty()) {
          if (allocationTags_.isEmpty()) {
            allocationTags_ = other.allocationTags_;
            bitField0_ |= 0x00000200;
          } else {
            ensureAllocationTagsIsMutable();
            allocationTags_.addAll(other.allocationTags_);
          }
          onChanged();
        }
        if (other.hasExposedPorts()) {
          exposedPorts_ = other.exposedPorts_;
          bitField0_ |= 0x00000400;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
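      // Merge semantics, as implemented above: singular message fields are
      // merged recursively (mergeId, mergeNodeId, ...), scalar and string
      // fields set on `other` overwrite the local value, and the repeated
      // allocation_tags list is concatenated rather than replaced.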

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        if (hasContainerToken()) {
          if (!getContainerToken().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                nodeHttpAddress_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                input.readMessage(
                    getContainerTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              case 56: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(7, tmpRaw);
                } else {
                  executionType_ = tmpRaw;
                  bitField0_ |= 0x00000040;
                }
                break;
              } // case 56
              case 64: {
                allocationRequestId_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              case 72: {
                version_ = input.readInt32();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 82: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureAllocationTagsIsMutable();
                allocationTags_.add(bs);
                break;
              } // case 82
              case 90: {
                exposedPorts_ = input.readBytes();
                bitField0_ |= 0x00000400;
                break;
              } // case 90
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
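      // The case labels above are protobuf wire tags, computed as
      // (field_number << 3) | wire_type. For example, case 10 is field 1 with
      // wire type 2 (length-delimited message), case 56 is field 7 with wire
      // type 0 (varint enum), and case 90 is field 11 with wire type 2
      // (length-delimited string).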
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> idBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       * @return Whether the id field is set.
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       * @return The id.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() {
        if (idBuilder_ == null) {
          return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
        } else {
          return idBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder setId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (idBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          id_ = value;
        } else {
          idBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder setId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (idBuilder_ == null) {
          id_ = builderForValue.build();
        } else {
          idBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder mergeId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (idBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            id_ != null &&
            id_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getIdBuilder().mergeFrom(value);
          } else {
            id_ = value;
          }
        } else {
          idBuilder_.mergeFrom(value);
        }
        if (id_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() {
        if (idBuilder_ != null) {
          return idBuilder_.getMessageOrBuilder();
        } else {
          return id_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getId(),
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }

      private java.lang.Object nodeHttpAddress_ = "";
      /**
       * <code>optional string node_http_address = 3;</code>
       * @return Whether the nodeHttpAddress field is set.
       */
      public boolean hasNodeHttpAddress() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string node_http_address = 3;</code>
       * @return The nodeHttpAddress.
       */
      public java.lang.String getNodeHttpAddress() {
        java.lang.Object ref = nodeHttpAddress_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeHttpAddress_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string node_http_address = 3;</code>
       * @return The bytes for nodeHttpAddress.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeHttpAddressBytes() {
        java.lang.Object ref = nodeHttpAddress_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeHttpAddress_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string node_http_address = 3;</code>
       * @param value The nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddress(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_http_address = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeHttpAddress() {
        nodeHttpAddress_ = getDefaultInstance().getNodeHttpAddress();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_http_address = 3;</code>
       * @param value The bytes for nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddressBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000008);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000010);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto containerToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> containerTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       * @return Whether the containerToken field is set.
       */
      public boolean hasContainerToken() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       * @return The containerToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getContainerToken() {
        if (containerTokenBuilder_ == null) {
          return containerToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
        } else {
          return containerTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public Builder setContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (containerTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerToken_ = value;
        } else {
          containerTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public Builder setContainerToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (containerTokenBuilder_ == null) {
          containerToken_ = builderForValue.build();
        } else {
          containerTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public Builder mergeContainerToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (containerTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0) &&
            containerToken_ != null &&
            containerToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getContainerTokenBuilder().mergeFrom(value);
          } else {
            containerToken_ = value;
          }
        } else {
          containerTokenBuilder_.mergeFrom(value);
        }
        if (containerToken_ != null) {
          bitField0_ |= 0x00000020;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public Builder clearContainerToken() {
        bitField0_ = (bitField0_ & ~0x00000020);
        containerToken_ = null;
        if (containerTokenBuilder_ != null) {
          containerTokenBuilder_.dispose();
          containerTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getContainerTokenBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getContainerTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getContainerTokenOrBuilder() {
        if (containerTokenBuilder_ != null) {
          return containerTokenBuilder_.getMessageOrBuilder();
        } else {
          return containerToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : containerToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto container_token = 6;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getContainerTokenFieldBuilder() {
        if (containerTokenBuilder_ == null) {
          containerTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getContainerToken(),
                  getParentForChildren(),
                  isClean());
          containerToken_ = null;
        }
        return containerTokenBuilder_;
      }

      private int executionType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 7 [default = GUARANTEED];</code>
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000040);
        executionType_ = 1;
        onChanged();
        return this;
      }

      private long allocationRequestId_ = -1L;
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return Whether the allocationRequestId field is set.
       */
      @java.lang.Override
      public boolean hasAllocationRequestId() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return The allocationRequestId.
       */
      @java.lang.Override
      public long getAllocationRequestId() {
        return allocationRequestId_;
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @param value The allocationRequestId to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationRequestId(long value) {
        allocationRequestId_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationRequestId() {
        bitField0_ = (bitField0_ & ~0x00000080);
        allocationRequestId_ = -1L;
        onChanged();
        return this;
      }

      private int version_;
      /**
       * <code>optional int32 version = 9 [default = 0];</code>
       * @return Whether the version field is set.
       */
      @java.lang.Override
      public boolean hasVersion() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int32 version = 9 [default = 0];</code>
       * @return The version.
       */
      @java.lang.Override
      public int getVersion() {
        return version_;
      }
      /**
       * <code>optional int32 version = 9 [default = 0];</code>
       * @param value The version to set.
       * @return This builder for chaining.
       */
      public Builder setVersion(int value) {
        version_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 version = 9 [default = 0];</code>
       * @return This builder for chaining.
       */
      public Builder clearVersion() {
        bitField0_ = (bitField0_ & ~0x00000100);
        version_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureAllocationTagsIsMutable() {
        if (!allocationTags_.isModifiable()) {
          allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
        }
        bitField0_ |= 0x00000200;
      }
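      // Copy-on-write guard: a list shared with another message stays
      // immutable until the first local mutation, at which point it is copied
      // into a fresh mutable LazyStringArrayList.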
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @return A list containing the allocationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getAllocationTagsList() {
        allocationTags_.makeImmutable();
        return allocationTags_;
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @return The count of allocationTags.
       */
      public int getAllocationTagsCount() {
        return allocationTags_.size();
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param index The index of the element to return.
       * @return The allocationTags at the given index.
       */
      public java.lang.String getAllocationTags(int index) {
        return allocationTags_.get(index);
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param index The index of the value to return.
       * @return The bytes of the allocationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAllocationTagsBytes(int index) {
        return allocationTags_.getByteString(index);
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param index The index to set the value at.
       * @param value The allocationTags to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.set(index, value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param value The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param values The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllAllocationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureAllocationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, allocationTags_);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationTags() {
        allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000200);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 10;</code>
       * @param value The bytes of the allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }

      private java.lang.Object exposedPorts_ = "";
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @return Whether the exposedPorts field is set.
       */
      public boolean hasExposedPorts() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @return The exposedPorts.
       */
      public java.lang.String getExposedPorts() {
        java.lang.Object ref = exposedPorts_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            exposedPorts_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @return The bytes for exposedPorts.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExposedPortsBytes() {
        java.lang.Object ref = exposedPorts_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          exposedPorts_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @param value The exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPorts(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearExposedPorts() {
        exposedPorts_ = getDefaultInstance().getExposedPorts();
        bitField0_ = (bitField0_ & ~0x00000400);
        onChanged();
        return this;
      }
      /**
       * <code>optional string exposed_ports = 11;</code>
       * @param value The bytes for exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPortsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerProto>() {
      @java.lang.Override
      public ContainerProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
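    // PARSER is kept as a deprecated public constant for backward
    // compatibility with older generated code; parser() and getParserForType()
    // below are the supported accessors.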

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
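  // A minimal parsing sketch (comment only; the byte source is a hypothetical
  // placeholder). parseFrom(byte[]) is the standard GeneratedMessageV3 entry
  // point and throws InvalidProtocolBufferException on malformed input:
  //
  //   byte[] data = proto.toByteArray();  // hypothetical serialized message
  //   ContainerProto parsed = ContainerProto.parseFrom(data);
  //   if (parsed.hasId()) {
  //     System.out.println(parsed.getId());
  //   }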

  public interface ContainerReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional int64 creation_time = 5;</code>
     * @return Whether the creationTime field is set.
     */
    boolean hasCreationTime();
    /**
     * <code>optional int64 creation_time = 5;</code>
     * @return The creationTime.
     */
    long getCreationTime();

    /**
     * <code>optional int64 finish_time = 6;</code>
     * @return Whether the finishTime field is set.
     */
    boolean hasFinishTime();
    /**
     * <code>optional int64 finish_time = 6;</code>
     * @return The finishTime.
     */
    long getFinishTime();

    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return Whether the diagnosticsInfo field is set.
     */
    boolean hasDiagnosticsInfo();
    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return The diagnosticsInfo.
     */
    java.lang.String getDiagnosticsInfo();
    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return The bytes for diagnosticsInfo.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes();

    /**
     * <code>optional string log_url = 8;</code>
     * @return Whether the logUrl field is set.
     */
    boolean hasLogUrl();
    /**
     * <code>optional string log_url = 8;</code>
     * @return The logUrl.
     */
    java.lang.String getLogUrl();
    /**
     * <code>optional string log_url = 8;</code>
     * @return The bytes for logUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogUrlBytes();

    /**
     * <code>optional int32 container_exit_status = 9;</code>
     * @return Whether the containerExitStatus field is set.
     */
    boolean hasContainerExitStatus();
    /**
     * <code>optional int32 container_exit_status = 9;</code>
     * @return The containerExitStatus.
     */
    int getContainerExitStatus();

    /**
     * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
     * @return Whether the containerState field is set.
     */
    boolean hasContainerState();
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
     * @return The containerState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState();

    /**
     * <code>optional string node_http_address = 11;</code>
     * @return Whether the nodeHttpAddress field is set.
     */
    boolean hasNodeHttpAddress();
    /**
     * <code>optional string node_http_address = 11;</code>
     * @return The nodeHttpAddress.
     */
    java.lang.String getNodeHttpAddress();
    /**
     * <code>optional string node_http_address = 11;</code>
     * @return The bytes for nodeHttpAddress.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeHttpAddressBytes();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();

    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return Whether the exposedPorts field is set.
     */
    boolean hasExposedPorts();
    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return The exposedPorts.
     */
    java.lang.String getExposedPorts();
    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return The bytes for exposedPorts.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExposedPortsBytes();
  }
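  // Every optional proto2 field in the OrBuilder interface above surfaces as a
  // hasXxx()/getXxx() pair: hasXxx() reports explicit presence, while getXxx()
  // falls back to the field default when unset. An illustrative round trip
  // (the field values here are hypothetical, not prescribed by the API):
  //
  //   ContainerReportProto report = ContainerReportProto.newBuilder()
  //       .setCreationTime(1690000000000L)
  //       .setContainerExitStatus(0)
  //       .setContainerState(ContainerStateProto.C_NEW)
  //       .build();
  //   byte[] wire = report.toByteArray();
  //   ContainerReportProto parsed = ContainerReportProto.parseFrom(wire);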
  /**
   * Protobuf type {@code hadoop.yarn.ContainerReportProto}
   */
  public static final class ContainerReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerReportProto)
      ContainerReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerReportProto.newBuilder() to construct.
    private ContainerReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
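    // The no-arg constructor below seeds proto2 defaults: diagnostics_info gets
    // "N/A" (per its [default = "N/A"] option), container_state and
    // executionType get raw enum number 1 (C_NEW and GUARANTEED respectively),
    // and the remaining string fields start empty.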
    private ContainerReportProto() {
      diagnosticsInfo_ = "N/A";
      logUrl_ = "";
      containerState_ = 1;
      nodeHttpAddress_ = "";
      executionType_ = 1;
      exposedPorts_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerReportProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder.class);
    }

    private int bitField0_;
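    // bitField0_ records which optional fields were explicitly set, one bit per
    // field in declaration order: 0x1 containerId, 0x2 resource, 0x4 nodeId,
    // 0x8 priority, 0x10 creationTime, 0x20 finishTime, 0x40 diagnosticsInfo,
    // 0x80 logUrl, 0x100 containerExitStatus, 0x200 containerState,
    // 0x400 nodeHttpAddress, 0x800 executionType, 0x1000 exposedPorts.
    // Each hasXxx() accessor below simply tests its bit.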
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int RESOURCE_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }

    public static final int NODE_ID_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int PRIORITY_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int CREATION_TIME_FIELD_NUMBER = 5;
    private long creationTime_ = 0L;
    /**
     * <code>optional int64 creation_time = 5;</code>
     * @return Whether the creationTime field is set.
     */
    @java.lang.Override
    public boolean hasCreationTime() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional int64 creation_time = 5;</code>
     * @return The creationTime.
     */
    @java.lang.Override
    public long getCreationTime() {
      return creationTime_;
    }

    public static final int FINISH_TIME_FIELD_NUMBER = 6;
    private long finishTime_ = 0L;
    /**
     * <code>optional int64 finish_time = 6;</code>
     * @return Whether the finishTime field is set.
     */
    @java.lang.Override
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int64 finish_time = 6;</code>
     * @return The finishTime.
     */
    @java.lang.Override
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int DIAGNOSTICS_INFO_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnosticsInfo_ = "N/A";
    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return Whether the diagnosticsInfo field is set.
     */
    @java.lang.Override
    public boolean hasDiagnosticsInfo() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return The diagnosticsInfo.
     */
    @java.lang.Override
    public java.lang.String getDiagnosticsInfo() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnosticsInfo_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
     * @return The bytes for diagnosticsInfo.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsInfoBytes() {
      java.lang.Object ref = diagnosticsInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnosticsInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
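    // All four string fields in this message share this lazy-decode pattern:
    // the volatile Object holds either a decoded java.lang.String or the raw
    // ByteString from the wire. getDiagnosticsInfo() decodes UTF-8 on first use
    // and caches the String only when the bytes are valid UTF-8;
    // getDiagnosticsInfoBytes() caches the encoded form in the opposite
    // direction. The volatile write makes the one-time swap safe for
    // concurrent readers.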

    public static final int LOG_URL_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private volatile java.lang.Object logUrl_ = "";
    /**
     * <code>optional string log_url = 8;</code>
     * @return Whether the logUrl field is set.
     */
    @java.lang.Override
    public boolean hasLogUrl() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional string log_url = 8;</code>
     * @return The logUrl.
     */
    @java.lang.Override
    public java.lang.String getLogUrl() {
      java.lang.Object ref = logUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          logUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string log_url = 8;</code>
     * @return The bytes for logUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogUrlBytes() {
      java.lang.Object ref = logUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        logUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CONTAINER_EXIT_STATUS_FIELD_NUMBER = 9;
    private int containerExitStatus_ = 0;
    /**
     * <code>optional int32 container_exit_status = 9;</code>
     * @return Whether the containerExitStatus field is set.
     */
    @java.lang.Override
    public boolean hasContainerExitStatus() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional int32 container_exit_status = 9;</code>
     * @return The containerExitStatus.
     */
    @java.lang.Override
    public int getContainerExitStatus() {
      return containerExitStatus_;
    }

    public static final int CONTAINER_STATE_FIELD_NUMBER = 10;
    private int containerState_ = 1;
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
     * @return Whether the containerState field is set.
     */
    @java.lang.Override
    public boolean hasContainerState() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
     * @return The containerState.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
    }
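    // Enum fields are stored as raw wire numbers. If containerState_ holds a
    // number with no matching constant (e.g. one written by a newer schema),
    // getContainerState() falls back to C_NEW instead of returning null.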

    public static final int NODE_HTTP_ADDRESS_FIELD_NUMBER = 11;
    @SuppressWarnings("serial")
    private volatile java.lang.Object nodeHttpAddress_ = "";
    /**
     * <code>optional string node_http_address = 11;</code>
     * @return Whether the nodeHttpAddress field is set.
     */
    @java.lang.Override
    public boolean hasNodeHttpAddress() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional string node_http_address = 11;</code>
     * @return The nodeHttpAddress.
     */
    @java.lang.Override
    public java.lang.String getNodeHttpAddress() {
      java.lang.Object ref = nodeHttpAddress_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeHttpAddress_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string node_http_address = 11;</code>
     * @return The bytes for nodeHttpAddress.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeHttpAddressBytes() {
      java.lang.Object ref = nodeHttpAddress_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeHttpAddress_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int EXECUTIONTYPE_FIELD_NUMBER = 12;
    private int executionType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override
    public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
    }

    public static final int EXPOSED_PORTS_FIELD_NUMBER = 13;
    @SuppressWarnings("serial")
    private volatile java.lang.Object exposedPorts_ = "";
    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return Whether the exposedPorts field is set.
     */
    @java.lang.Override
    public boolean hasExposedPorts() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return The exposedPorts.
     */
    @java.lang.Override
    public java.lang.String getExposedPorts() {
      java.lang.Object ref = exposedPorts_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          exposedPorts_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string exposed_ports = 13;</code>
     * @return The bytes for exposedPorts.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getExposedPortsBytes() {
      java.lang.Object ref = exposedPorts_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        exposedPorts_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
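    // isInitialized() is memoized: -1 not yet computed, 0 false, 1 true. Only
    // the resource sub-message needs a recursive check here; no check is
    // generated for the other nested message types, which carry no required
    // fields.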

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(5, creationTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, finishTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 7, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, logUrl_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeInt32(9, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeEnum(10, containerState_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 11, nodeHttpAddress_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeEnum(12, executionType_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 13, exposedPorts_);
      }
      getUnknownFields().writeTo(output);
    }
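    // writeTo() emits each present field in ascending field-number order. Every
    // write starts with a tag of (field_number << 3) | wire_type, so field 1
    // (a length-delimited message, wire type 2) gets tag 10 and field 5 (an
    // int64 varint, wire type 0) gets tag 40; these are exactly the tag values
    // matched by the Builder's mergeFrom() parse loop further down. Unknown
    // fields captured at parse time are re-emitted last.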

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getResource());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getNodeId());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, creationTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, finishTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(7, diagnosticsInfo_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, logUrl_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(9, containerExitStatus_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(10, containerState_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(11, nodeHttpAddress_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(12, executionType_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(13, exposedPorts_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
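    // The computed size is cached in the inherited memoizedSize field (-1 until
    // first use), which is safe because the message is immutable once built.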

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasCreationTime() != other.hasCreationTime()) return false;
      if (hasCreationTime()) {
        if (getCreationTime()
            != other.getCreationTime()) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasDiagnosticsInfo() != other.hasDiagnosticsInfo()) return false;
      if (hasDiagnosticsInfo()) {
        if (!getDiagnosticsInfo()
            .equals(other.getDiagnosticsInfo())) return false;
      }
      if (hasLogUrl() != other.hasLogUrl()) return false;
      if (hasLogUrl()) {
        if (!getLogUrl()
            .equals(other.getLogUrl())) return false;
      }
      if (hasContainerExitStatus() != other.hasContainerExitStatus()) return false;
      if (hasContainerExitStatus()) {
        if (getContainerExitStatus()
            != other.getContainerExitStatus()) return false;
      }
      if (hasContainerState() != other.hasContainerState()) return false;
      if (hasContainerState()) {
        if (containerState_ != other.containerState_) return false;
      }
      if (hasNodeHttpAddress() != other.hasNodeHttpAddress()) return false;
      if (hasNodeHttpAddress()) {
        if (!getNodeHttpAddress()
            .equals(other.getNodeHttpAddress())) return false;
      }
      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (executionType_ != other.executionType_) return false;
      }
      if (hasExposedPorts() != other.hasExposedPorts()) return false;
      if (hasExposedPorts()) {
        if (!getExposedPorts()
            .equals(other.getExposedPorts())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
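    // equals() demands matching presence before comparing values for every
    // optional field; enum fields compare their raw numbers directly, and the
    // unknown field sets must match as well.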

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasNodeId()) {
        hash = (37 * hash) + NODE_ID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasCreationTime()) {
        hash = (37 * hash) + CREATION_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getCreationTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISH_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasDiagnosticsInfo()) {
        hash = (37 * hash) + DIAGNOSTICS_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnosticsInfo().hashCode();
      }
      if (hasLogUrl()) {
        hash = (37 * hash) + LOG_URL_FIELD_NUMBER;
        hash = (53 * hash) + getLogUrl().hashCode();
      }
      if (hasContainerExitStatus()) {
        hash = (37 * hash) + CONTAINER_EXIT_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getContainerExitStatus();
      }
      if (hasContainerState()) {
        hash = (37 * hash) + CONTAINER_STATE_FIELD_NUMBER;
        hash = (53 * hash) + containerState_;
      }
      if (hasNodeHttpAddress()) {
        hash = (37 * hash) + NODE_HTTP_ADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeHttpAddress().hashCode();
      }
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER;
        hash = (53 * hash) + executionType_;
      }
      if (hasExposedPorts()) {
        hash = (37 * hash) + EXPOSED_PORTS_FIELD_NUMBER;
        hash = (53 * hash) + getExposedPorts().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
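    // hashCode() mixes each present field's number and value hash, so messages
    // that differ only in which optional fields are set still hash differently.
    // The result is memoized since the message is immutable.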

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
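    // All parseFrom() overloads delegate to the message's PARSER. The delimited
    // variants pair with MessageLite.writeDelimitedTo() to length-prefix each
    // message, allowing several messages on one stream (illustrative, with
    // hypothetical streams 'out' and 'in'):
    //
    //   report.writeDelimitedTo(out);
    //   ContainerReportProto next = ContainerReportProto.parseDelimitedFrom(in);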

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerReportProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerReportProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getResourceFieldBuilder();
          getNodeIdFieldBuilder();
          getPriorityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        creationTime_ = 0L;
        finishTime_ = 0L;
        diagnosticsInfo_ = "N/A";
        logUrl_ = "";
        containerExitStatus_ = 0;
        containerState_ = 1;
        nodeHttpAddress_ = "";
        executionType_ = 1;
        exposedPorts_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerReportProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.creationTime_ = creationTime_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.diagnosticsInfo_ = diagnosticsInfo_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.logUrl_ = logUrl_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.containerExitStatus_ = containerExitStatus_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.containerState_ = containerState_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.nodeHttpAddress_ = nodeHttpAddress_;
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.executionType_ = executionType_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.exposedPorts_ = exposedPorts_;
          to_bitField0_ |= 0x00001000;
        }
        result.bitField0_ |= to_bitField0_;
      }
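      // buildPartial0() copies staged builder values into the new message and
      // translates the builder's presence bits into the message's bitField0_.
      // For message-typed fields, a live sub-builder (if one was created) wins
      // over the plain cached value.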

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasCreationTime()) {
          setCreationTime(other.getCreationTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasDiagnosticsInfo()) {
          diagnosticsInfo_ = other.diagnosticsInfo_;
          bitField0_ |= 0x00000040;
          onChanged();
        }
        if (other.hasLogUrl()) {
          logUrl_ = other.logUrl_;
          bitField0_ |= 0x00000080;
          onChanged();
        }
        if (other.hasContainerExitStatus()) {
          setContainerExitStatus(other.getContainerExitStatus());
        }
        if (other.hasContainerState()) {
          setContainerState(other.getContainerState());
        }
        if (other.hasNodeHttpAddress()) {
          nodeHttpAddress_ = other.nodeHttpAddress_;
          bitField0_ |= 0x00000400;
          onChanged();
        }
        if (other.hasExecutionType()) {
          setExecutionType(other.getExecutionType());
        }
        if (other.hasExposedPorts()) {
          exposedPorts_ = other.exposedPorts_;
          bitField0_ |= 0x00001000;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
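      // Merge semantics: the four nested messages are combined recursively via
      // mergeXxx(), while scalars, strings and enums set in 'other' overwrite
      // this builder's values outright. Unknown fields from 'other' are
      // appended.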

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 40: {
                creationTime_ = input.readInt64();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              case 48: {
                finishTime_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 58: {
                diagnosticsInfo_ = input.readBytes();
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 66: {
                logUrl_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 72: {
                containerExitStatus_ = input.readInt32();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 80: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(10, tmpRaw);
                } else {
                  containerState_ = tmpRaw;
                  bitField0_ |= 0x00000200;
                }
                break;
              } // case 80
              case 90: {
                nodeHttpAddress_ = input.readBytes();
                bitField0_ |= 0x00000400;
                break;
              } // case 90
              case 96: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(12, tmpRaw);
                } else {
                  executionType_ = tmpRaw;
                  bitField0_ |= 0x00000800;
                }
                break;
              } // case 96
              case 106: {
                exposedPorts_ = input.readBytes();
                bitField0_ |= 0x00001000;
                break;
              } // case 106
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
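      // In the parse loop above, tag 0 signals end of input. Enum fields
      // (cases 80 and 96) validate the raw number with forNumber(); an
      // unrecognized number is kept as an unknown varint field rather than
      // dropped, so values written by newer schemas survive a re-serialization
      // round trip.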
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }
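      // Lazy nested-builder pattern: containerId_ holds the plain message until
      // getContainerIdFieldBuilder() is first called; from then on the
      // SingleFieldBuilderV3, seeded with the current value, is the single
      // source of truth and containerId_ is cleared. The same pattern repeats
      // for resource, nodeId and priority below.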

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000002);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }
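
      /*
       * Editor's note: the resource accessors above implement the generated
       * lazy single-field-builder pattern. The nested message lives in
       * exactly one place at a time: the plain reference resource_ until a
       * sub-builder is requested, after which SingleFieldBuilderV3 takes
       * ownership and resource_ is nulled out (see getResourceFieldBuilder()).
       * A minimal sketch of how calling code typically drives this, using
       * only members of this class:
       *
       *   ContainerReportProto.Builder b = ContainerReportProto.newBuilder();
       *   b.getResourceBuilder();              // forces the field builder, marks the field set
       *   boolean present = b.hasResource();   // true: getResourceBuilder() set bit 0x2
       *
       * The node_id and priority fields below repeat the same pattern with
       * bits 0x4 and 0x8.
       */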

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto node_id = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000008);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }

      private long creationTime_ ;
      /**
       * <code>optional int64 creation_time = 5;</code>
       * @return Whether the creationTime field is set.
       */
      @java.lang.Override
      public boolean hasCreationTime() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional int64 creation_time = 5;</code>
       * @return The creationTime.
       */
      @java.lang.Override
      public long getCreationTime() {
        return creationTime_;
      }
      /**
       * <code>optional int64 creation_time = 5;</code>
       * @param value The creationTime to set.
       * @return This builder for chaining.
       */
      public Builder setCreationTime(long value) {
        creationTime_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 creation_time = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearCreationTime() {
        bitField0_ = (bitField0_ & ~0x00000010);
        creationTime_ = 0L;
        onChanged();
        return this;
      }

      private long finishTime_ ;
      /**
       * <code>optional int64 finish_time = 6;</code>
       * @return Whether the finishTime field is set.
       */
      @java.lang.Override
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int64 finish_time = 6;</code>
       * @return The finishTime.
       */
      @java.lang.Override
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * <code>optional int64 finish_time = 6;</code>
       * @param value The finishTime to set.
       * @return This builder for chaining.
       */
      public Builder setFinishTime(long value) {
        finishTime_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 finish_time = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        finishTime_ = 0L;
        onChanged();
        return this;
      }
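
      /*
       * Editor's note: creation_time and finish_time are proto2 optional
       * scalars, so presence is tracked in bitField0_ (0x10 and 0x20 here)
       * independently of the stored value. Setting a field to its default is
       * still "set"; only clearXxx() drops the bit. A small illustration,
       * assuming only methods defined in this builder:
       *
       *   ContainerReportProto.Builder b = ContainerReportProto.newBuilder();
       *   b.setCreationTime(0L);
       *   boolean present = b.hasCreationTime();   // true despite the default value
       *   b.clearCreationTime();
       *   present = b.hasCreationTime();           // false again
       */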

      private java.lang.Object diagnosticsInfo_ = "N/A";
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @return Whether the diagnosticsInfo field is set.
       */
      public boolean hasDiagnosticsInfo() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @return The diagnosticsInfo.
       */
      public java.lang.String getDiagnosticsInfo() {
        java.lang.Object ref = diagnosticsInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnosticsInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @return The bytes for diagnosticsInfo.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsInfoBytes() {
        java.lang.Object ref = diagnosticsInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnosticsInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @param value The diagnosticsInfo to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsInfo(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnosticsInfo_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnosticsInfo() {
        diagnosticsInfo_ = getDefaultInstance().getDiagnosticsInfo();
        bitField0_ = (bitField0_ & ~0x00000040);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics_info = 7 [default = "N/A"];</code>
       * @param value The bytes for diagnosticsInfo to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnosticsInfo_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
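
      /*
       * Editor's note: diagnostics_info carries an explicit proto default of
       * "N/A", so a fresh builder already reports that value, and
       * clearDiagnosticsInfo() restores it via
       * getDefaultInstance().getDiagnosticsInfo(), unlike log_url below,
       * whose default is the empty string. Sketch:
       *
       *   ContainerReportProto.Builder b = ContainerReportProto.newBuilder();
       *   String d = b.getDiagnosticsInfo();   // "N/A" before any setter runs
       */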

      private java.lang.Object logUrl_ = "";
      /**
       * <code>optional string log_url = 8;</code>
       * @return Whether the logUrl field is set.
       */
      public boolean hasLogUrl() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional string log_url = 8;</code>
       * @return The logUrl.
       */
      public java.lang.String getLogUrl() {
        java.lang.Object ref = logUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            logUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string log_url = 8;</code>
       * @return The bytes for logUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLogUrlBytes() {
        java.lang.Object ref = logUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          logUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string log_url = 8;</code>
       * @param value The logUrl to set.
       * @return This builder for chaining.
       */
      public Builder setLogUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        logUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_url = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearLogUrl() {
        logUrl_ = getDefaultInstance().getLogUrl();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_url = 8;</code>
       * @param value The bytes for logUrl to set.
       * @return This builder for chaining.
       */
      public Builder setLogUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        logUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }

      private int containerExitStatus_ ;
      /**
       * <code>optional int32 container_exit_status = 9;</code>
       * @return Whether the containerExitStatus field is set.
       */
      @java.lang.Override
      public boolean hasContainerExitStatus() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int32 container_exit_status = 9;</code>
       * @return The containerExitStatus.
       */
      @java.lang.Override
      public int getContainerExitStatus() {
        return containerExitStatus_;
      }
      /**
       * <code>optional int32 container_exit_status = 9;</code>
       * @param value The containerExitStatus to set.
       * @return This builder for chaining.
       */
      public Builder setContainerExitStatus(int value) {
        containerExitStatus_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 container_exit_status = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearContainerExitStatus() {
        bitField0_ = (bitField0_ & ~0x00000100);
        containerExitStatus_ = 0;
        onChanged();
        return this;
      }

      private int containerState_ = 1;
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
       * @return Whether the containerState field is set.
       */
      @java.lang.Override public boolean hasContainerState() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
       * @return The containerState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getContainerState() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(containerState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
       * @param value The containerState to set.
       * @return This builder for chaining.
       */
      public Builder setContainerState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000200;
        containerState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto container_state = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearContainerState() {
        bitField0_ = (bitField0_ & ~0x00000200);
        containerState_ = 1;
        onChanged();
        return this;
      }
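
      /*
       * Editor's note: container_state is held as its raw wire number
       * (containerState_ = 1 by default) rather than as an enum constant, so
       * an unrecognized number degrades gracefully: getContainerState()
       * falls back to C_NEW when forNumber() returns null. Round trip using
       * only methods defined above:
       *
       *   b.setContainerState(ContainerStateProto.C_NEW);
       *   ContainerStateProto s = b.getContainerState();   // C_NEW
       */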

      private java.lang.Object nodeHttpAddress_ = "";
      /**
       * <code>optional string node_http_address = 11;</code>
       * @return Whether the nodeHttpAddress field is set.
       */
      public boolean hasNodeHttpAddress() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional string node_http_address = 11;</code>
       * @return The nodeHttpAddress.
       */
      public java.lang.String getNodeHttpAddress() {
        java.lang.Object ref = nodeHttpAddress_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeHttpAddress_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string node_http_address = 11;</code>
       * @return The bytes for nodeHttpAddress.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeHttpAddressBytes() {
        java.lang.Object ref = nodeHttpAddress_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeHttpAddress_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string node_http_address = 11;</code>
       * @param value The nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddress(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_http_address = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeHttpAddress() {
        nodeHttpAddress_ = getDefaultInstance().getNodeHttpAddress();
        bitField0_ = (bitField0_ & ~0x00000400);
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_http_address = 11;</code>
       * @param value The bytes for nodeHttpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setNodeHttpAddressBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeHttpAddress_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }

      private int executionType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000800;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 12 [default = GUARANTEED];</code>
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000800);
        executionType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object exposedPorts_ = "";
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @return Whether the exposedPorts field is set.
       */
      public boolean hasExposedPorts() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @return The exposedPorts.
       */
      public java.lang.String getExposedPorts() {
        java.lang.Object ref = exposedPorts_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            exposedPorts_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @return The bytes for exposedPorts.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExposedPortsBytes() {
        java.lang.Object ref = exposedPorts_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          exposedPorts_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @param value The exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPorts(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @return This builder for chaining.
       */
      public Builder clearExposedPorts() {
        exposedPorts_ = getDefaultInstance().getExposedPorts();
        bitField0_ = (bitField0_ & ~0x00001000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string exposed_ports = 13;</code>
       * @param value The bytes for exposedPorts to set.
       * @return This builder for chaining.
       */
      public Builder setExposedPortsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        exposedPorts_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerReportProto>() {
      @java.lang.Override
      public ContainerReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
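
  /*
   * Editor's note: a minimal end-to-end sketch of the generated API for the
   * message above, using only members defined in ContainerReportProto; the
   * values are illustrative, not taken from YARN:
   *
   *   ContainerReportProto report = ContainerReportProto.newBuilder()
   *       .setCreationTime(1L)
   *       .setFinishTime(2L)
   *       .setDiagnosticsInfo("ok")
   *       .setContainerExitStatus(0)
   *       .setExecutionType(ExecutionTypeProto.GUARANTEED)
   *       .build();
   *   byte[] wire = report.toByteArray();
   *   ContainerReportProto parsed = ContainerReportProto.parseFrom(wire);
   */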

  public interface URLProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.URLProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string scheme = 1;</code>
     * @return Whether the scheme field is set.
     */
    boolean hasScheme();
    /**
     * <code>optional string scheme = 1;</code>
     * @return The scheme.
     */
    java.lang.String getScheme();
    /**
     * <code>optional string scheme = 1;</code>
     * @return The bytes for scheme.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchemeBytes();

    /**
     * <code>optional string host = 2;</code>
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * <code>optional string host = 2;</code>
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 2;</code>
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 port = 3;</code>
     * @return Whether the port field is set.
     */
    boolean hasPort();
    /**
     * <code>optional int32 port = 3;</code>
     * @return The port.
     */
    int getPort();

    /**
     * <code>optional string file = 4;</code>
     * @return Whether the file field is set.
     */
    boolean hasFile();
    /**
     * <code>optional string file = 4;</code>
     * @return The file.
     */
    java.lang.String getFile();
    /**
     * <code>optional string file = 4;</code>
     * @return The bytes for file.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getFileBytes();

    /**
     * <code>optional string userInfo = 5;</code>
     * @return Whether the userInfo field is set.
     */
    boolean hasUserInfo();
    /**
     * <code>optional string userInfo = 5;</code>
     * @return The userInfo.
     */
    java.lang.String getUserInfo();
    /**
     * <code>optional string userInfo = 5;</code>
     * @return The bytes for userInfo.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserInfoBytes();
  }
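
  /*
   * Editor's note: URLProtoOrBuilder is the read-only view implemented by
   * both URLProto and URLProto.Builder, so code can accept a finished
   * message or an in-progress builder interchangeably. A hypothetical
   * helper (formatUrl is not part of this file) illustrating the idea:
   *
   *   static String formatUrl(YarnProtos.URLProtoOrBuilder u) {
   *     return u.getScheme() + "://" + u.getHost() + ":" + u.getPort() + u.getFile();
   *   }
   */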
  /**
   * Protobuf type {@code hadoop.yarn.URLProto}
   */
  public static final class URLProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.URLProto)
      URLProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use URLProto.newBuilder() to construct.
    private URLProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private URLProto() {
      scheme_ = "";
      host_ = "";
      file_ = "";
      userInfo_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new URLProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.URLProto.class, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder.class);
    }

    private int bitField0_;
    public static final int SCHEME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object scheme_ = "";
    /**
     * <code>optional string scheme = 1;</code>
     * @return Whether the scheme field is set.
     */
    @java.lang.Override
    public boolean hasScheme() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string scheme = 1;</code>
     * @return The scheme.
     */
    @java.lang.Override
    public java.lang.String getScheme() {
      java.lang.Object ref = scheme_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          scheme_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string scheme = 1;</code>
     * @return The bytes for scheme.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchemeBytes() {
      java.lang.Object ref = scheme_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        scheme_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
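
    /*
     * Editor's note: scheme_ is a volatile Object because it holds either a
     * String or the ByteString read off the wire. The first getScheme()
     * after parsing decodes UTF-8 and, when valid, caches the String back
     * into the field; getSchemeBytes() caches in the opposite direction.
     * The volatile write publishes the cached form safely across threads,
     * and @SuppressWarnings("serial") silences the warning about the
     * non-serializable Object field. host_, file_ and userInfo_ below use
     * the same idiom.
     */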

    public static final int HOST_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object host_ = "";
    /**
     * <code>optional string host = 2;</code>
     * @return Whether the host field is set.
     */
    @java.lang.Override
    public boolean hasHost() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string host = 2;</code>
     * @return The host.
     */
    @java.lang.Override
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string host = 2;</code>
     * @return The bytes for host.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int PORT_FIELD_NUMBER = 3;
    private int port_ = 0;
    /**
     * <code>optional int32 port = 3;</code>
     * @return Whether the port field is set.
     */
    @java.lang.Override
    public boolean hasPort() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 port = 3;</code>
     * @return The port.
     */
    @java.lang.Override
    public int getPort() {
      return port_;
    }

    public static final int FILE_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object file_ = "";
    /**
     * <code>optional string file = 4;</code>
     * @return Whether the file field is set.
     */
    @java.lang.Override
    public boolean hasFile() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string file = 4;</code>
     * @return The file.
     */
    @java.lang.Override
    public java.lang.String getFile() {
      java.lang.Object ref = file_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          file_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string file = 4;</code>
     * @return The bytes for file.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getFileBytes() {
      java.lang.Object ref = file_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        file_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int USERINFO_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object userInfo_ = "";
    /**
     * <code>optional string userInfo = 5;</code>
     * @return Whether the userInfo field is set.
     */
    @java.lang.Override
    public boolean hasUserInfo() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string userInfo = 5;</code>
     * @return The userInfo.
     */
    @java.lang.Override
    public java.lang.String getUserInfo() {
      java.lang.Object ref = userInfo_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          userInfo_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string userInfo = 5;</code>
     * @return The bytes for userInfo.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserInfoBytes() {
      java.lang.Object ref = userInfo_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        userInfo_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
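
    /*
     * Editor's note: URLProto declares only optional fields, so
     * isInitialized() can never answer false; the memoizedIsInitialized
     * byte (-1 unknown, 0 false, 1 true) exists so that messages with deep
     * nesting pay the initialization walk at most once.
     */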

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, scheme_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, port_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, file_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, userInfo_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, scheme_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, port_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, file_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, userInfo_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.URLProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.URLProto other = (org.apache.hadoop.yarn.proto.YarnProtos.URLProto) obj;

      if (hasScheme() != other.hasScheme()) return false;
      if (hasScheme()) {
        if (!getScheme()
            .equals(other.getScheme())) return false;
      }
      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasPort() != other.hasPort()) return false;
      if (hasPort()) {
        if (getPort()
            != other.getPort()) return false;
      }
      if (hasFile() != other.hasFile()) return false;
      if (hasFile()) {
        if (!getFile()
            .equals(other.getFile())) return false;
      }
      if (hasUserInfo() != other.hasUserInfo()) return false;
      if (hasUserInfo()) {
        if (!getUserInfo()
            .equals(other.getUserInfo())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasScheme()) {
        hash = (37 * hash) + SCHEME_FIELD_NUMBER;
        hash = (53 * hash) + getScheme().hashCode();
      }
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasPort()) {
        hash = (37 * hash) + PORT_FIELD_NUMBER;
        hash = (53 * hash) + getPort();
      }
      if (hasFile()) {
        hash = (37 * hash) + FILE_FIELD_NUMBER;
        hash = (53 * hash) + getFile().hashCode();
      }
      if (hasUserInfo()) {
        hash = (37 * hash) + USERINFO_FIELD_NUMBER;
        hash = (53 * hash) + getUserInfo().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
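
    /*
     * Editor's note: hashCode() mixes only fields whose presence bit is
     * set, mirroring equals() above, which compares hasXxx() before values;
     * equal messages therefore hash identically. Worked out for a message
     * with only port set:
     *
     *   hash = 41;
     *   hash = (19 * hash) + descriptorHash;
     *   hash = (37 * hash) + PORT_FIELD_NUMBER;   // 3
     *   hash = (53 * hash) + port;
     *   hash = (29 * hash) + unknownFieldsHash;
     */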

    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
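
    /*
     * Editor's note: the parseFrom overloads above are thin wrappers around
     * PARSER; the stream variants go through parseWithIOException so that an
     * underlying IOException is rethrown as-is rather than staying wrapped
     * in InvalidProtocolBufferException. Typical round trip, using only
     * members of this class:
     *
     *   URLProto url = URLProto.newBuilder()
     *       .setScheme("http")
     *       .setHost("example.com")
     *       .setPort(80)
     *       .build();
     *   URLProto again = URLProto.parseFrom(url.toByteString());
     */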

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.URLProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.URLProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.URLProto)
        org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.URLProto.class, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.URLProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        scheme_ = "";
        host_ = "";
        port_ = 0;
        file_ = "";
        userInfo_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_URLProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.URLProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.URLProto result = new org.apache.hadoop.yarn.proto.YarnProtos.URLProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.URLProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.scheme_ = scheme_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.host_ = host_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.port_ = port_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.file_ = file_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.userInfo_ = userInfo_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }
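
      /*
       * Editor's note: buildPartial0 copies fields one by one and rebuilds
       * the message's presence mask from the builder's, so a bit crosses
       * over only when the corresponding field was actually set here.
       * buildPartial() skips the copy entirely when bitField0_ == 0, i.e.
       * when no setter has run on this builder.
       */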

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.URLProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.URLProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.URLProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance()) return this;
        if (other.hasScheme()) {
          scheme_ = other.scheme_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasHost()) {
          host_ = other.host_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasPort()) {
          setPort(other.getPort());
        }
        if (other.hasFile()) {
          file_ = other.file_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasUserInfo()) {
          userInfo_ = other.userInfo_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                scheme_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                host_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                port_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 34: {
                file_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                userInfo_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
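      // bitField0_ records explicit presence of the optional fields:
      // bit 0 = scheme, bit 1 = host, bit 2 = port, bit 3 = file, bit 4 = userInfo.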

      private java.lang.Object scheme_ = "";
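      // scheme_ (like the other string fields below) holds either a String or a
      // ByteString: the getter lazily decodes UTF-8 bytes and caches the String
      // only when the bytes are valid UTF-8, while get*Bytes() caches the reverse
      // encoding.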
      /**
       * <code>optional string scheme = 1;</code>
       * @return Whether the scheme field is set.
       */
      public boolean hasScheme() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string scheme = 1;</code>
       * @return The scheme.
       */
      public java.lang.String getScheme() {
        java.lang.Object ref = scheme_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            scheme_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string scheme = 1;</code>
       * @return The bytes for scheme.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSchemeBytes() {
        java.lang.Object ref = scheme_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          scheme_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string scheme = 1;</code>
       * @param value The scheme to set.
       * @return This builder for chaining.
       */
      public Builder setScheme(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        scheme_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string scheme = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearScheme() {
        scheme_ = getDefaultInstance().getScheme();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string scheme = 1;</code>
       * @param value The bytes for scheme to set.
       * @return This builder for chaining.
       */
      public Builder setSchemeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        scheme_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object host_ = "";
      /**
       * <code>optional string host = 2;</code>
       * @return Whether the host field is set.
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string host = 2;</code>
       * @return The host.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string host = 2;</code>
       * @return The bytes for host.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string host = 2;</code>
       * @param value The host to set.
       * @return This builder for chaining.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearHost() {
        host_ = getDefaultInstance().getHost();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 2;</code>
       * @param value The bytes for host to set.
       * @return This builder for chaining.
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private int port_ ;
      /**
       * <code>optional int32 port = 3;</code>
       * @return Whether the port field is set.
       */
      @java.lang.Override
      public boolean hasPort() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 port = 3;</code>
       * @return The port.
       */
      @java.lang.Override
      public int getPort() {
        return port_;
      }
      /**
       * <code>optional int32 port = 3;</code>
       * @param value The port to set.
       * @return This builder for chaining.
       */
      public Builder setPort(int value) {
        port_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 port = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearPort() {
        bitField0_ = (bitField0_ & ~0x00000004);
        port_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object file_ = "";
      /**
       * <code>optional string file = 4;</code>
       * @return Whether the file field is set.
       */
      public boolean hasFile() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string file = 4;</code>
       * @return The file.
       */
      public java.lang.String getFile() {
        java.lang.Object ref = file_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            file_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string file = 4;</code>
       * @return The bytes for file.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getFileBytes() {
        java.lang.Object ref = file_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          file_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string file = 4;</code>
       * @param value The file to set.
       * @return This builder for chaining.
       */
      public Builder setFile(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        file_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string file = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearFile() {
        file_ = getDefaultInstance().getFile();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string file = 4;</code>
       * @param value The bytes for file to set.
       * @return This builder for chaining.
       */
      public Builder setFileBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        file_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object userInfo_ = "";
      /**
       * <code>optional string userInfo = 5;</code>
       * @return Whether the userInfo field is set.
       */
      public boolean hasUserInfo() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string userInfo = 5;</code>
       * @return The userInfo.
       */
      public java.lang.String getUserInfo() {
        java.lang.Object ref = userInfo_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            userInfo_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string userInfo = 5;</code>
       * @return The bytes for userInfo.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserInfoBytes() {
        java.lang.Object ref = userInfo_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          userInfo_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string userInfo = 5;</code>
       * @param value The userInfo to set.
       * @return This builder for chaining.
       */
      public Builder setUserInfo(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        userInfo_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string userInfo = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearUserInfo() {
        userInfo_ = getDefaultInstance().getUserInfo();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string userInfo = 5;</code>
       * @param value The bytes for userInfo to set.
       * @return This builder for chaining.
       */
      public Builder setUserInfoBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        userInfo_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.URLProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.URLProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.URLProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.URLProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<URLProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<URLProto>() {
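      // Single-pass parser: on failure, the partially populated message is attached
      // to the InvalidProtocolBufferException via setUnfinishedMessage() so callers
      // can inspect what was read. Direct use of PARSER is deprecated in favor of
      // the parser() accessor below.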
      @java.lang.Override
      public URLProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<URLProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<URLProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.URLProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
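
    // A minimal usage sketch (illustrative values; every method shown is part of
    // the generated API above):
    //
    //   URLProto url = URLProto.newBuilder()
    //       .setScheme("hdfs")
    //       .setHost("nn.example.com")      // hypothetical host
    //       .setPort(8020)
    //       .setFile("/user/app/job.jar")
    //       .build();
    //   byte[] wire = url.toByteArray();
    //   URLProto roundTripped = URLProto.parseFrom(wire);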

  }

  public interface LocalResourceProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LocalResourceProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource();
    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional int64 size = 2;</code>
     * @return Whether the size field is set.
     */
    boolean hasSize();
    /**
     * <code>optional int64 size = 2;</code>
     * @return The size.
     */
    long getSize();

    /**
     * <code>optional int64 timestamp = 3;</code>
     * @return Whether the timestamp field is set.
     */
    boolean hasTimestamp();
    /**
     * <code>optional int64 timestamp = 3;</code>
     * @return The timestamp.
     */
    long getTimestamp();

    /**
     * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
     * @return Whether the type field is set.
     */
    boolean hasType();
    /**
     * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
     * @return The type.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType();

    /**
     * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
     * @return Whether the visibility field is set.
     */
    boolean hasVisibility();
    /**
     * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
     * @return The visibility.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility();

    /**
     * <code>optional string pattern = 6;</code>
     * @return Whether the pattern field is set.
     */
    boolean hasPattern();
    /**
     * <code>optional string pattern = 6;</code>
     * @return The pattern.
     */
    java.lang.String getPattern();
    /**
     * <code>optional string pattern = 6;</code>
     * @return The bytes for pattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPatternBytes();

    /**
     * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
     * @return Whether the shouldBeUploadedToSharedCache field is set.
     */
    boolean hasShouldBeUploadedToSharedCache();
    /**
     * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
     * @return The shouldBeUploadedToSharedCache.
     */
    boolean getShouldBeUploadedToSharedCache();
  }
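
  // A minimal construction sketch for LocalResourceProto (illustrative values;
  // `url` is a URLProto such as the one sketched above, and the enum constants
  // follow yarn_protos.proto):
  //
  //   LocalResourceProto jar = LocalResourceProto.newBuilder()
  //       .setResource(url)
  //       .setSize(1048576L)
  //       .setTimestamp(System.currentTimeMillis())
  //       .setType(LocalResourceTypeProto.FILE)
  //       .setVisibility(LocalResourceVisibilityProto.APPLICATION)
  //       .build();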
  /**
   * Protobuf type {@code hadoop.yarn.LocalResourceProto}
   */
  public static final class LocalResourceProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LocalResourceProto)
      LocalResourceProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LocalResourceProto.newBuilder() to construct.
    private LocalResourceProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LocalResourceProto() {
      type_ = 1;
      visibility_ = 1;
      pattern_ = "";
    }
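    // Proto2 field defaults: 1 is the wire number of each enum's default value
    // (ARCHIVE for type, PUBLIC for visibility), matching the fallbacks in
    // getType() and getVisibility() below.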

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LocalResourceProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.URLProto resource_;
    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_;
    }

    public static final int SIZE_FIELD_NUMBER = 2;
    private long size_ = 0L;
    /**
     * <code>optional int64 size = 2;</code>
     * @return Whether the size field is set.
     */
    @java.lang.Override
    public boolean hasSize() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 size = 2;</code>
     * @return The size.
     */
    @java.lang.Override
    public long getSize() {
      return size_;
    }

    public static final int TIMESTAMP_FIELD_NUMBER = 3;
    private long timestamp_ = 0L;
    /**
     * <code>optional int64 timestamp = 3;</code>
     * @return Whether the timestamp field is set.
     */
    @java.lang.Override
    public boolean hasTimestamp() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 timestamp = 3;</code>
     * @return The timestamp.
     */
    @java.lang.Override
    public long getTimestamp() {
      return timestamp_;
    }

    public static final int TYPE_FIELD_NUMBER = 4;
    private int type_ = 1;
    /**
     * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
     * @return Whether the type field is set.
     */
    @java.lang.Override public boolean hasType() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
     * @return The type.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType() {
      org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(type_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.ARCHIVE : result;
    }

    public static final int VISIBILITY_FIELD_NUMBER = 5;
    private int visibility_ = 1;
    /**
     * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
     * @return Whether the visibility field is set.
     */
    @java.lang.Override public boolean hasVisibility() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
     * @return The visibility.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility() {
      org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(visibility_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.PUBLIC : result;
    }

    public static final int PATTERN_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private volatile java.lang.Object pattern_ = "";
    /**
     * <code>optional string pattern = 6;</code>
     * @return Whether the pattern field is set.
     */
    @java.lang.Override
    public boolean hasPattern() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional string pattern = 6;</code>
     * @return The pattern.
     */
    @java.lang.Override
    public java.lang.String getPattern() {
      java.lang.Object ref = pattern_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          pattern_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string pattern = 6;</code>
     * @return The bytes for pattern.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getPatternBytes() {
      java.lang.Object ref = pattern_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        pattern_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SHOULD_BE_UPLOADED_TO_SHARED_CACHE_FIELD_NUMBER = 7;
    private boolean shouldBeUploadedToSharedCache_ = false;
    /**
     * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
     * @return Whether the shouldBeUploadedToSharedCache field is set.
     */
    @java.lang.Override
    public boolean hasShouldBeUploadedToSharedCache() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
     * @return The shouldBeUploadedToSharedCache.
     */
    @java.lang.Override
    public boolean getShouldBeUploadedToSharedCache() {
      return shouldBeUploadedToSharedCache_;
    }

    private byte memoizedIsInitialized = -1;
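    // Tri-state cache: -1 = not yet computed, 0 = not initialized, 1 = initialized.
    // All fields of LocalResourceProto are optional, so the check always succeeds.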
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
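      // Emit only fields whose presence bit is set, in ascending field-number
      // order, then append any unknown fields preserved from parsing.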
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, size_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, timestamp_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(4, type_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, visibility_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, pattern_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeBool(7, shouldBeUploadedToSharedCache_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
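      // The encoded size (tag + payload per present field) is computed once and
      // memoized; -1 in memoizedSize means "not yet computed".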
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, size_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, timestamp_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(4, type_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, visibility_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, pattern_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(7, shouldBeUploadedToSharedCache_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
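      // Presence-aware equality: each field must be set on both messages or on
      // neither, set values must match, and unknown fields must match.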
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto other = (org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto) obj;

      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasSize() != other.hasSize()) return false;
      if (hasSize()) {
        if (getSize()
            != other.getSize()) return false;
      }
      if (hasTimestamp() != other.hasTimestamp()) return false;
      if (hasTimestamp()) {
        if (getTimestamp()
            != other.getTimestamp()) return false;
      }
      if (hasType() != other.hasType()) return false;
      if (hasType()) {
        if (type_ != other.type_) return false;
      }
      if (hasVisibility() != other.hasVisibility()) return false;
      if (hasVisibility()) {
        if (visibility_ != other.visibility_) return false;
      }
      if (hasPattern() != other.hasPattern()) return false;
      if (hasPattern()) {
        if (!getPattern()
            .equals(other.getPattern())) return false;
      }
      if (hasShouldBeUploadedToSharedCache() != other.hasShouldBeUploadedToSharedCache()) return false;
      if (hasShouldBeUploadedToSharedCache()) {
        if (getShouldBeUploadedToSharedCache()
            != other.getShouldBeUploadedToSharedCache()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
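      // Field-number-salted hash, memoized (0 doubles as "not yet computed");
      // consistent with the presence-aware equals() above.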
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasSize()) {
        hash = (37 * hash) + SIZE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSize());
      }
      if (hasTimestamp()) {
        hash = (37 * hash) + TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getTimestamp());
      }
      if (hasType()) {
        hash = (37 * hash) + TYPE_FIELD_NUMBER;
        hash = (53 * hash) + type_;
      }
      if (hasVisibility()) {
        hash = (37 * hash) + VISIBILITY_FIELD_NUMBER;
        hash = (53 * hash) + visibility_;
      }
      if (hasPattern()) {
        hash = (37 * hash) + PATTERN_FIELD_NUMBER;
        hash = (53 * hash) + getPattern().hashCode();
      }
      if (hasShouldBeUploadedToSharedCache()) {
        hash = (37 * hash) + SHOULD_BE_UPLOADED_TO_SHARED_CACHE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getShouldBeUploadedToSharedCache());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LocalResourceProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LocalResourceProto)
        org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        size_ = 0L;
        timestamp_ = 0L;
        type_ = 1;
        visibility_ = 1;
        pattern_ = "";
        shouldBeUploadedToSharedCache_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LocalResourceProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto result) {
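        // Copy each set field from the builder into the immutable result and
        // translate the builder's presence bits into the message's bitField0_;
        // the nested resource comes from its field builder when one is active.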
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.size_ = size_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.timestamp_ = timestamp_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.type_ = type_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.visibility_ = visibility_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.pattern_ = pattern_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.shouldBeUploadedToSharedCache_ = shouldBeUploadedToSharedCache_;
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance()) return this;
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasSize()) {
          setSize(other.getSize());
        }
        if (other.hasTimestamp()) {
          setTimestamp(other.getTimestamp());
        }
        if (other.hasType()) {
          setType(other.getType());
        }
        if (other.hasVisibility()) {
          setVisibility(other.getVisibility());
        }
        if (other.hasPattern()) {
          pattern_ = other.pattern_;
          bitField0_ |= 0x00000020;
          onChanged();
        }
        if (other.hasShouldBeUploadedToSharedCache()) {
          setShouldBeUploadedToSharedCache(other.getShouldBeUploadedToSharedCache());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                size_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                timestamp_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
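              // For the enum fields below, an unrecognized wire number is kept as
              // an unknown varint field rather than dropped, so values written by
              // newer schema versions survive a re-serialization round trip.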
              case 32: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(4, tmpRaw);
                } else {
                  type_ = tmpRaw;
                  bitField0_ |= 0x00000008;
                }
                break;
              } // case 32
              case 40: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(5, tmpRaw);
                } else {
                  visibility_ = tmpRaw;
                  bitField0_ |= 0x00000010;
                }
                break;
              } // case 40
              case 50: {
                pattern_ = input.readBytes();
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              case 56: {
                shouldBeUploadedToSharedCache_ = input.readBool();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.URLProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder> resourceBuilder_;
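      // The nested message lives either directly in resource_ or, once a
      // builder-returning accessor is first used, in resourceBuilder_; the two are
      // mutually exclusive (getResourceFieldBuilder() nulls resource_ after handing
      // it over).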
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.URLProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.URLProto value) {
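        // Message-typed merge: if a non-default resource is already present, merge
        // `value` into it field by field via its builder; otherwise adopt `value`
        // directly.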
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.URLProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.URLProto resource = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.URLProto, org.apache.hadoop.yarn.proto.YarnProtos.URLProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.URLProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private long size_ ;
      /**
       * <code>optional int64 size = 2;</code>
       * @return Whether the size field is set.
       */
      @java.lang.Override
      public boolean hasSize() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 size = 2;</code>
       * @return The size.
       */
      @java.lang.Override
      public long getSize() {
        return size_;
      }
      /**
       * <code>optional int64 size = 2;</code>
       * @param value The size to set.
       * @return This builder for chaining.
       */
      public Builder setSize(long value) {
        size_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 size = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearSize() {
        bitField0_ = (bitField0_ & ~0x00000002);
        size_ = 0L;
        onChanged();
        return this;
      }

      private long timestamp_ ;
      /**
       * <code>optional int64 timestamp = 3;</code>
       * @return Whether the timestamp field is set.
       */
      @java.lang.Override
      public boolean hasTimestamp() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 timestamp = 3;</code>
       * @return The timestamp.
       */
      @java.lang.Override
      public long getTimestamp() {
        return timestamp_;
      }
      /**
       * <code>optional int64 timestamp = 3;</code>
       * @param value The timestamp to set.
       * @return This builder for chaining.
       */
      public Builder setTimestamp(long value) {
        timestamp_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 timestamp = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000004);
        timestamp_ = 0L;
        onChanged();
        return this;
      }

      private int type_ = 1;
      /**
       * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
       * @return Whether the type field is set.
       */
      @java.lang.Override public boolean hasType() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
       * @return The type.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto getType() {
        org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.forNumber(type_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto.ARCHIVE : result;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
       * @param value The type to set.
       * @return This builder for chaining.
       */
      public Builder setType(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000008;
        type_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceTypeProto type = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearType() {
        bitField0_ = (bitField0_ & ~0x00000008);
        type_ = 1;
        onChanged();
        return this;
      }

      private int visibility_ = 1;
      /**
       * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
       * @return Whether the visibility field is set.
       */
      @java.lang.Override public boolean hasVisibility() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
       * @return The visibility.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto getVisibility() {
        org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto result = org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.forNumber(visibility_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto.PUBLIC : result;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
       * @param value The visibility to set.
       * @return This builder for chaining.
       */
      public Builder setVisibility(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceVisibilityProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        visibility_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceVisibilityProto visibility = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearVisibility() {
        bitField0_ = (bitField0_ & ~0x00000010);
        visibility_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object pattern_ = "";
      /**
       * <code>optional string pattern = 6;</code>
       * @return Whether the pattern field is set.
       */
      public boolean hasPattern() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional string pattern = 6;</code>
       * @return The pattern.
       */
      public java.lang.String getPattern() {
        java.lang.Object ref = pattern_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            pattern_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string pattern = 6;</code>
       * @return The bytes for pattern.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getPatternBytes() {
        java.lang.Object ref = pattern_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          pattern_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string pattern = 6;</code>
       * @param value The pattern to set.
       * @return This builder for chaining.
       */
      public Builder setPattern(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        pattern_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional string pattern = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearPattern() {
        pattern_ = getDefaultInstance().getPattern();
        bitField0_ = (bitField0_ & ~0x00000020);
        onChanged();
        return this;
      }
      /**
       * <code>optional string pattern = 6;</code>
       * @param value The bytes for pattern to set.
       * @return This builder for chaining.
       */
      public Builder setPatternBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        pattern_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }

      private boolean shouldBeUploadedToSharedCache_;
      /**
       * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
       * @return Whether the shouldBeUploadedToSharedCache field is set.
       */
      @java.lang.Override
      public boolean hasShouldBeUploadedToSharedCache() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
       * @return The shouldBeUploadedToSharedCache.
       */
      @java.lang.Override
      public boolean getShouldBeUploadedToSharedCache() {
        return shouldBeUploadedToSharedCache_;
      }
      /**
       * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
       * @param value The shouldBeUploadedToSharedCache to set.
       * @return This builder for chaining.
       */
      public Builder setShouldBeUploadedToSharedCache(boolean value) {
        shouldBeUploadedToSharedCache_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool should_be_uploaded_to_shared_cache = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearShouldBeUploadedToSharedCache() {
        bitField0_ = (bitField0_ & ~0x00000040);
        shouldBeUploadedToSharedCache_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LocalResourceProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LocalResourceProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LocalResourceProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LocalResourceProto>() {
      @java.lang.Override
      public LocalResourceProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
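
    // Note that the PARSER field itself is deprecated: callers are expected to
    // go through parser() below or the static parseFrom(...) overloads instead
    // of touching the field directly.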

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LocalResourceProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LocalResourceProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
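
  // A minimal usage sketch (illustrative; the values are hypothetical, and
  // build() follows the same generated pattern shown for the classes below):
  //
  //   LocalResourceProto resource = LocalResourceProto.newBuilder()
  //       .setType(LocalResourceTypeProto.ARCHIVE)
  //       .setVisibility(LocalResourceVisibilityProto.PUBLIC)
  //       .setPattern(".*")
  //       .setShouldBeUploadedToSharedCache(false)
  //       .build();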

  public interface StringLongMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringLongMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>required int64 value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>required int64 value = 2;</code>
     * @return The value.
     */
    long getValue();
  }
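
  // As with every generated message in this file, the *OrBuilder interface is
  // implemented by both the message class and its Builder, so read-only
  // callers can accept either form.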
  /**
   * Protobuf type {@code hadoop.yarn.StringLongMapProto}
   */
  public static final class StringLongMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StringLongMapProto)
      StringLongMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StringLongMapProto.newBuilder() to construct.
    private StringLongMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StringLongMapProto() {
      key_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StringLongMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
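    // key_ is declared as Object because it lazily holds either the decoded
    // java.lang.String or the raw ByteString read off the wire: getKey() above
    // decodes UTF-8 once and caches the String (only when the bytes are valid
    // UTF-8), and getKeyBytes() below caches the reverse conversion.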
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    private long value_ = 0L;
    /**
     * <code>required int64 value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required int64 value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public long getValue() {
      return value_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasValue()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (getValue()
            != other.getValue()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getValue());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StringLongMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringLongMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLongMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto result) {
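        // Copy a field into the result only when its bit is set in the
        // builder's bitField0_, then OR the transferred bits into the result's
        // bitField0_ so hasKey()/hasValue() answer correctly on the message.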
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasKey()) {
          return false;
        }
        if (!hasValue()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
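              // Each tag value is (field_number << 3) | wire_type:
              //   10 = (1 << 3) | 2 -> field 1 (key), length-delimited;
              //   16 = (2 << 3) | 0 -> field 2 (value), varint;
              //   0 marks the end of input for this message.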
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                value_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
      /**
       * <code>required string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private long value_;
      /**
       * <code>required int64 value = 2;</code>
       * @return Whether the value field is set.
       */
      @java.lang.Override
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required int64 value = 2;</code>
       * @return The value.
       */
      @java.lang.Override
      public long getValue() {
        return value_;
      }
      /**
       * <code>required int64 value = 2;</code>
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(long value) {
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringLongMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StringLongMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringLongMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringLongMapProto>() {
      @java.lang.Override
      public StringLongMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StringLongMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StringLongMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
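
  // A round-trip sketch (illustrative; the key/value pair is hypothetical, and
  // toByteArray() comes from the protobuf runtime): both fields are required,
  // so build() throws for an incomplete message where buildPartial() would not.
  //
  //   byte[] bytes = StringLongMapProto.newBuilder()
  //       .setKey("memory-mb")
  //       .setValue(4096L)
  //       .build()
  //       .toByteArray();
  //   StringLongMapProto copy = StringLongMapProto.parseFrom(bytes);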

  public interface StringFloatMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringFloatMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>required float value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>required float value = 2;</code>
     * @return The value.
     */
    float getValue();
  }
  /**
   * Protobuf type {@code hadoop.yarn.StringFloatMapProto}
   */
  public static final class StringFloatMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StringFloatMapProto)
      StringFloatMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StringFloatMapProto.newBuilder() to construct.
    private StringFloatMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StringFloatMapProto() {
      key_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StringFloatMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>required string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    private float value_ = 0F;
    /**
     * <code>required float value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required float value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public float getValue() {
      return value_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasValue()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeFloat(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (java.lang.Float.floatToIntBits(getValue())
            != java.lang.Float.floatToIntBits(
                other.getValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getValue());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StringFloatMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringFloatMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = 0F;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringFloatMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasKey()) {
          return false;
        }
        if (!hasValue()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
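              // Same tag arithmetic as in StringLongMapProto, except the float
              // field arrives as 21 = (2 << 3) | 5 -> field 2 (value), 32-bit
              // fixed, since protobuf floats are always encoded as fixed32.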
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 21: {
                value_ = input.readFloat();
                bitField0_ |= 0x00000002;
                break;
              } // case 21
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
      /**
       * <code>required string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private float value_;
      /**
       * <code>required float value = 2;</code>
       * @return Whether the value field is set.
       */
      @java.lang.Override
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required float value = 2;</code>
       * @return The value.
       */
      @java.lang.Override
      public float getValue() {
        return value_;
      }
      /**
       * <code>required float value = 2;</code>
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(float value) {
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required float value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = 0F;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringFloatMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StringFloatMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringFloatMapProto>() {
      @java.lang.Override
      public StringFloatMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StringFloatMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringFloatMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
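
  // Editorial usage sketch for StringFloatMapProto (not generated code).
  // setValue(float) appears above; setKey(String), toByteArray(), and the
  // static parseFrom(byte[]) are assumed from the standard generated API
  // for a string-to-float map entry:
  //
  //   StringFloatMapProto entry = StringFloatMapProto.newBuilder()
  //       .setKey("gpu-utilization")   // hypothetical key
  //       .setValue(0.75F)
  //       .build();
  //   StringFloatMapProto parsed = StringFloatMapProto.parseFrom(entry.toByteArray());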

  public interface ApplicationResourceUsageReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationResourceUsageReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 num_used_containers = 1;</code>
     * @return Whether the numUsedContainers field is set.
     */
    boolean hasNumUsedContainers();
    /**
     * <code>optional int32 num_used_containers = 1;</code>
     * @return The numUsedContainers.
     */
    int getNumUsedContainers();

    /**
     * <code>optional int32 num_reserved_containers = 2;</code>
     * @return Whether the numReservedContainers field is set.
     */
    boolean hasNumReservedContainers();
    /**
     * <code>optional int32 num_reserved_containers = 2;</code>
     * @return The numReservedContainers.
     */
    int getNumReservedContainers();

    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     * @return Whether the usedResources field is set.
     */
    boolean hasUsedResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     * @return The usedResources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     * @return Whether the reservedResources field is set.
     */
    boolean hasReservedResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     * @return The reservedResources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     * @return Whether the neededResources field is set.
     */
    boolean hasNeededResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     * @return The neededResources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder();

    /**
     * <code>optional int64 memory_seconds = 6;</code>
     * @return Whether the memorySeconds field is set.
     */
    boolean hasMemorySeconds();
    /**
     * <code>optional int64 memory_seconds = 6;</code>
     * @return The memorySeconds.
     */
    long getMemorySeconds();

    /**
     * <code>optional int64 vcore_seconds = 7;</code>
     * @return Whether the vcoreSeconds field is set.
     */
    boolean hasVcoreSeconds();
    /**
     * <code>optional int64 vcore_seconds = 7;</code>
     * @return The vcoreSeconds.
     */
    long getVcoreSeconds();

    /**
     * <code>optional float queue_usage_percentage = 8;</code>
     * @return Whether the queueUsagePercentage field is set.
     */
    boolean hasQueueUsagePercentage();
    /**
     * <code>optional float queue_usage_percentage = 8;</code>
     * @return The queueUsagePercentage.
     */
    float getQueueUsagePercentage();

    /**
     * <code>optional float cluster_usage_percentage = 9;</code>
     * @return Whether the clusterUsagePercentage field is set.
     */
    boolean hasClusterUsagePercentage();
    /**
     * <code>optional float cluster_usage_percentage = 9;</code>
     * @return The clusterUsagePercentage.
     */
    float getClusterUsagePercentage();

    /**
     * <code>optional int64 preempted_memory_seconds = 10;</code>
     * @return Whether the preemptedMemorySeconds field is set.
     */
    boolean hasPreemptedMemorySeconds();
    /**
     * <code>optional int64 preempted_memory_seconds = 10;</code>
     * @return The preemptedMemorySeconds.
     */
    long getPreemptedMemorySeconds();

    /**
     * <code>optional int64 preempted_vcore_seconds = 11;</code>
     * @return Whether the preemptedVcoreSeconds field is set.
     */
    boolean hasPreemptedVcoreSeconds();
    /**
     * <code>optional int64 preempted_vcore_seconds = 11;</code>
     * @return The preemptedVcoreSeconds.
     */
    long getPreemptedVcoreSeconds();

    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> 
        getApplicationResourceUsageMapList();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index);
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    int getApplicationResourceUsageMapCount();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
        getApplicationResourceUsageMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> 
        getApplicationPreemptedResourceUsageMapList();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index);
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    int getApplicationPreemptedResourceUsageMapCount();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
        getApplicationPreemptedResourceUsageMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationResourceUsageReportProto}
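   *
   * <p>A minimal builder sketch (added editorially, values hypothetical),
   * using only setters that appear in this class:
   * <pre>{@code
   * ApplicationResourceUsageReportProto report =
   *     ApplicationResourceUsageReportProto.newBuilder()
   *         .setNumUsedContainers(4)
   *         .setMemorySeconds(2048L)
   *         .setVcoreSeconds(16L)
   *         .build();
   * }</pre>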
   */
  public static final class ApplicationResourceUsageReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationResourceUsageReportProto)
      ApplicationResourceUsageReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationResourceUsageReportProto.newBuilder() to construct.
    private ApplicationResourceUsageReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationResourceUsageReportProto() {
      applicationResourceUsageMap_ = java.util.Collections.emptyList();
      applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationResourceUsageReportProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder.class);
    }

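    // bitField0_ tracks which optional fields have been explicitly set, one
    // bit per field: 0x1 for field 1 (num_used_containers) up through 0x400
    // for field 11 (preempted_vcore_seconds). The has*() accessors below
    // simply test the corresponding bit.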
    private int bitField0_;
    public static final int NUM_USED_CONTAINERS_FIELD_NUMBER = 1;
    private int numUsedContainers_ = 0;
    /**
     * <code>optional int32 num_used_containers = 1;</code>
     * @return Whether the numUsedContainers field is set.
     */
    @java.lang.Override
    public boolean hasNumUsedContainers() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 num_used_containers = 1;</code>
     * @return The numUsedContainers.
     */
    @java.lang.Override
    public int getNumUsedContainers() {
      return numUsedContainers_;
    }

    public static final int NUM_RESERVED_CONTAINERS_FIELD_NUMBER = 2;
    private int numReservedContainers_ = 0;
    /**
     * <code>optional int32 num_reserved_containers = 2;</code>
     * @return Whether the numReservedContainers field is set.
     */
    @java.lang.Override
    public boolean hasNumReservedContainers() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 num_reserved_containers = 2;</code>
     * @return The numReservedContainers.
     */
    @java.lang.Override
    public int getNumReservedContainers() {
      return numReservedContainers_;
    }

    public static final int USED_RESOURCES_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto usedResources_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     * @return Whether the usedResources field is set.
     */
    @java.lang.Override
    public boolean hasUsedResources() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     * @return The usedResources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources() {
      return usedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder() {
      return usedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
    }

    public static final int RESERVED_RESOURCES_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto reservedResources_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     * @return Whether the reservedResources field is set.
     */
    @java.lang.Override
    public boolean hasReservedResources() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     * @return The reservedResources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources() {
      return reservedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder() {
      return reservedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
    }

    public static final int NEEDED_RESOURCES_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto neededResources_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     * @return Whether the neededResources field is set.
     */
    @java.lang.Override
    public boolean hasNeededResources() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     * @return The neededResources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources() {
      return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder() {
      return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
    }

    public static final int MEMORY_SECONDS_FIELD_NUMBER = 6;
    private long memorySeconds_ = 0L;
    /**
     * <code>optional int64 memory_seconds = 6;</code>
     * @return Whether the memorySeconds field is set.
     */
    @java.lang.Override
    public boolean hasMemorySeconds() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int64 memory_seconds = 6;</code>
     * @return The memorySeconds.
     */
    @java.lang.Override
    public long getMemorySeconds() {
      return memorySeconds_;
    }

    public static final int VCORE_SECONDS_FIELD_NUMBER = 7;
    private long vcoreSeconds_ = 0L;
    /**
     * <code>optional int64 vcore_seconds = 7;</code>
     * @return Whether the vcoreSeconds field is set.
     */
    @java.lang.Override
    public boolean hasVcoreSeconds() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional int64 vcore_seconds = 7;</code>
     * @return The vcoreSeconds.
     */
    @java.lang.Override
    public long getVcoreSeconds() {
      return vcoreSeconds_;
    }

    public static final int QUEUE_USAGE_PERCENTAGE_FIELD_NUMBER = 8;
    private float queueUsagePercentage_ = 0F;
    /**
     * <code>optional float queue_usage_percentage = 8;</code>
     * @return Whether the queueUsagePercentage field is set.
     */
    @java.lang.Override
    public boolean hasQueueUsagePercentage() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional float queue_usage_percentage = 8;</code>
     * @return The queueUsagePercentage.
     */
    @java.lang.Override
    public float getQueueUsagePercentage() {
      return queueUsagePercentage_;
    }

    public static final int CLUSTER_USAGE_PERCENTAGE_FIELD_NUMBER = 9;
    private float clusterUsagePercentage_ = 0F;
    /**
     * <code>optional float cluster_usage_percentage = 9;</code>
     * @return Whether the clusterUsagePercentage field is set.
     */
    @java.lang.Override
    public boolean hasClusterUsagePercentage() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional float cluster_usage_percentage = 9;</code>
     * @return The clusterUsagePercentage.
     */
    @java.lang.Override
    public float getClusterUsagePercentage() {
      return clusterUsagePercentage_;
    }

    public static final int PREEMPTED_MEMORY_SECONDS_FIELD_NUMBER = 10;
    private long preemptedMemorySeconds_ = 0L;
    /**
     * <code>optional int64 preempted_memory_seconds = 10;</code>
     * @return Whether the preemptedMemorySeconds field is set.
     */
    @java.lang.Override
    public boolean hasPreemptedMemorySeconds() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional int64 preempted_memory_seconds = 10;</code>
     * @return The preemptedMemorySeconds.
     */
    @java.lang.Override
    public long getPreemptedMemorySeconds() {
      return preemptedMemorySeconds_;
    }

    public static final int PREEMPTED_VCORE_SECONDS_FIELD_NUMBER = 11;
    private long preemptedVcoreSeconds_ = 0L;
    /**
     * <code>optional int64 preempted_vcore_seconds = 11;</code>
     * @return Whether the preemptedVcoreSeconds field is set.
     */
    @java.lang.Override
    public boolean hasPreemptedVcoreSeconds() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional int64 preempted_vcore_seconds = 11;</code>
     * @return The preemptedVcoreSeconds.
     */
    @java.lang.Override
    public long getPreemptedVcoreSeconds() {
      return preemptedVcoreSeconds_;
    }

    public static final int APPLICATION_RESOURCE_USAGE_MAP_FIELD_NUMBER = 12;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationResourceUsageMap_;
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationResourceUsageMapList() {
      return applicationResourceUsageMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
        getApplicationResourceUsageMapOrBuilderList() {
      return applicationResourceUsageMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    @java.lang.Override
    public int getApplicationResourceUsageMapCount() {
      return applicationResourceUsageMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index) {
      return applicationResourceUsageMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder(
        int index) {
      return applicationResourceUsageMap_.get(index);
    }

    public static final int APPLICATION_PREEMPTED_RESOURCE_USAGE_MAP_FIELD_NUMBER = 13;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationPreemptedResourceUsageMap_;
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationPreemptedResourceUsageMapList() {
      return applicationPreemptedResourceUsageMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
        getApplicationPreemptedResourceUsageMapOrBuilderList() {
      return applicationPreemptedResourceUsageMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    @java.lang.Override
    public int getApplicationPreemptedResourceUsageMapCount() {
      return applicationPreemptedResourceUsageMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index) {
      return applicationPreemptedResourceUsageMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder(
        int index) {
      return applicationPreemptedResourceUsageMap_.get(index);
    }

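    // Caches the result of isInitialized(): -1 means not yet computed,
    // 0 means known-uninitialized, 1 means known-initialized.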
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasUsedResources()) {
        if (!getUsedResources().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasReservedResources()) {
        if (!getReservedResources().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasNeededResources()) {
        if (!getNeededResources().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getApplicationResourceUsageMapCount(); i++) {
        if (!getApplicationResourceUsageMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getApplicationPreemptedResourceUsageMapCount(); i++) {
        if (!getApplicationPreemptedResourceUsageMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, numUsedContainers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, numReservedContainers_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getUsedResources());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getReservedResources());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(5, getNeededResources());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, memorySeconds_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt64(7, vcoreSeconds_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeFloat(8, queueUsagePercentage_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeFloat(9, clusterUsagePercentage_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeInt64(10, preemptedMemorySeconds_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeInt64(11, preemptedVcoreSeconds_);
      }
      for (int i = 0; i < applicationResourceUsageMap_.size(); i++) {
        output.writeMessage(12, applicationResourceUsageMap_.get(i));
      }
      for (int i = 0; i < applicationPreemptedResourceUsageMap_.size(); i++) {
        output.writeMessage(13, applicationPreemptedResourceUsageMap_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, numUsedContainers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, numReservedContainers_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getUsedResources());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getReservedResources());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getNeededResources());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, memorySeconds_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, vcoreSeconds_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(8, queueUsagePercentage_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(9, clusterUsagePercentage_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(10, preemptedMemorySeconds_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(11, preemptedVcoreSeconds_);
      }
      for (int i = 0; i < applicationResourceUsageMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(12, applicationResourceUsageMap_.get(i));
      }
      for (int i = 0; i < applicationPreemptedResourceUsageMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(13, applicationPreemptedResourceUsageMap_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

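    // Float fields are compared via Float.floatToIntBits so NaN compares
    // equal to NaN and +0.0f differs from -0.0f, matching Float.equals
    // semantics rather than the == operator.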
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto) obj;

      if (hasNumUsedContainers() != other.hasNumUsedContainers()) return false;
      if (hasNumUsedContainers()) {
        if (getNumUsedContainers()
            != other.getNumUsedContainers()) return false;
      }
      if (hasNumReservedContainers() != other.hasNumReservedContainers()) return false;
      if (hasNumReservedContainers()) {
        if (getNumReservedContainers()
            != other.getNumReservedContainers()) return false;
      }
      if (hasUsedResources() != other.hasUsedResources()) return false;
      if (hasUsedResources()) {
        if (!getUsedResources()
            .equals(other.getUsedResources())) return false;
      }
      if (hasReservedResources() != other.hasReservedResources()) return false;
      if (hasReservedResources()) {
        if (!getReservedResources()
            .equals(other.getReservedResources())) return false;
      }
      if (hasNeededResources() != other.hasNeededResources()) return false;
      if (hasNeededResources()) {
        if (!getNeededResources()
            .equals(other.getNeededResources())) return false;
      }
      if (hasMemorySeconds() != other.hasMemorySeconds()) return false;
      if (hasMemorySeconds()) {
        if (getMemorySeconds()
            != other.getMemorySeconds()) return false;
      }
      if (hasVcoreSeconds() != other.hasVcoreSeconds()) return false;
      if (hasVcoreSeconds()) {
        if (getVcoreSeconds()
            != other.getVcoreSeconds()) return false;
      }
      if (hasQueueUsagePercentage() != other.hasQueueUsagePercentage()) return false;
      if (hasQueueUsagePercentage()) {
        if (java.lang.Float.floatToIntBits(getQueueUsagePercentage())
            != java.lang.Float.floatToIntBits(
                other.getQueueUsagePercentage())) return false;
      }
      if (hasClusterUsagePercentage() != other.hasClusterUsagePercentage()) return false;
      if (hasClusterUsagePercentage()) {
        if (java.lang.Float.floatToIntBits(getClusterUsagePercentage())
            != java.lang.Float.floatToIntBits(
                other.getClusterUsagePercentage())) return false;
      }
      if (hasPreemptedMemorySeconds() != other.hasPreemptedMemorySeconds()) return false;
      if (hasPreemptedMemorySeconds()) {
        if (getPreemptedMemorySeconds()
            != other.getPreemptedMemorySeconds()) return false;
      }
      if (hasPreemptedVcoreSeconds() != other.hasPreemptedVcoreSeconds()) return false;
      if (hasPreemptedVcoreSeconds()) {
        if (getPreemptedVcoreSeconds()
            != other.getPreemptedVcoreSeconds()) return false;
      }
      if (!getApplicationResourceUsageMapList()
          .equals(other.getApplicationResourceUsageMapList())) return false;
      if (!getApplicationPreemptedResourceUsageMapList()
          .equals(other.getApplicationPreemptedResourceUsageMapList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNumUsedContainers()) {
        hash = (37 * hash) + NUM_USED_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumUsedContainers();
      }
      if (hasNumReservedContainers()) {
        hash = (37 * hash) + NUM_RESERVED_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumReservedContainers();
      }
      if (hasUsedResources()) {
        hash = (37 * hash) + USED_RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getUsedResources().hashCode();
      }
      if (hasReservedResources()) {
        hash = (37 * hash) + RESERVED_RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getReservedResources().hashCode();
      }
      if (hasNeededResources()) {
        hash = (37 * hash) + NEEDED_RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getNeededResources().hashCode();
      }
      if (hasMemorySeconds()) {
        hash = (37 * hash) + MEMORY_SECONDS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getMemorySeconds());
      }
      if (hasVcoreSeconds()) {
        hash = (37 * hash) + VCORE_SECONDS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getVcoreSeconds());
      }
      if (hasQueueUsagePercentage()) {
        hash = (37 * hash) + QUEUE_USAGE_PERCENTAGE_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getQueueUsagePercentage());
      }
      if (hasClusterUsagePercentage()) {
        hash = (37 * hash) + CLUSTER_USAGE_PERCENTAGE_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getClusterUsagePercentage());
      }
      if (hasPreemptedMemorySeconds()) {
        hash = (37 * hash) + PREEMPTED_MEMORY_SECONDS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getPreemptedMemorySeconds());
      }
      if (hasPreemptedVcoreSeconds()) {
        hash = (37 * hash) + PREEMPTED_VCORE_SECONDS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getPreemptedVcoreSeconds());
      }
      if (getApplicationResourceUsageMapCount() > 0) {
        hash = (37 * hash) + APPLICATION_RESOURCE_USAGE_MAP_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationResourceUsageMapList().hashCode();
      }
      if (getApplicationPreemptedResourceUsageMapCount() > 0) {
        hash = (37 * hash) + APPLICATION_PREEMPTED_RESOURCE_USAGE_MAP_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationPreemptedResourceUsageMapList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationResourceUsageReportProto}
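     *
     * <p>Editorial sketch (hypothetical values): an existing message can be
     * copied and adjusted via {@code toBuilder()}, using only methods defined
     * in this class:
     * <pre>{@code
     * ApplicationResourceUsageReportProto updated = report.toBuilder()
     *     .clearNumUsedContainers()
     *     .setQueueUsagePercentage(25.0F)
     *     .build();
     * }</pre>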
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationResourceUsageReportProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getUsedResourcesFieldBuilder();
          getReservedResourcesFieldBuilder();
          getNeededResourcesFieldBuilder();
          getApplicationResourceUsageMapFieldBuilder();
          getApplicationPreemptedResourceUsageMapFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        numUsedContainers_ = 0;
        numReservedContainers_ = 0;
        usedResources_ = null;
        if (usedResourcesBuilder_ != null) {
          usedResourcesBuilder_.dispose();
          usedResourcesBuilder_ = null;
        }
        reservedResources_ = null;
        if (reservedResourcesBuilder_ != null) {
          reservedResourcesBuilder_.dispose();
          reservedResourcesBuilder_ = null;
        }
        neededResources_ = null;
        if (neededResourcesBuilder_ != null) {
          neededResourcesBuilder_.dispose();
          neededResourcesBuilder_ = null;
        }
        memorySeconds_ = 0L;
        vcoreSeconds_ = 0L;
        queueUsagePercentage_ = 0F;
        clusterUsagePercentage_ = 0F;
        preemptedMemorySeconds_ = 0L;
        preemptedVcoreSeconds_ = 0L;
        if (applicationResourceUsageMapBuilder_ == null) {
          applicationResourceUsageMap_ = java.util.Collections.emptyList();
        } else {
          applicationResourceUsageMap_ = null;
          applicationResourceUsageMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000800);
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList();
        } else {
          applicationPreemptedResourceUsageMap_ = null;
          applicationPreemptedResourceUsageMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00001000);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result) {
        if (applicationResourceUsageMapBuilder_ == null) {
          if (((bitField0_ & 0x00000800) != 0)) {
            applicationResourceUsageMap_ = java.util.Collections.unmodifiableList(applicationResourceUsageMap_);
            bitField0_ = (bitField0_ & ~0x00000800);
          }
          result.applicationResourceUsageMap_ = applicationResourceUsageMap_;
        } else {
          result.applicationResourceUsageMap_ = applicationResourceUsageMapBuilder_.build();
        }
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          if (((bitField0_ & 0x00001000) != 0)) {
            applicationPreemptedResourceUsageMap_ = java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_);
            bitField0_ = (bitField0_ & ~0x00001000);
          }
          result.applicationPreemptedResourceUsageMap_ = applicationPreemptedResourceUsageMap_;
        } else {
          result.applicationPreemptedResourceUsageMap_ = applicationPreemptedResourceUsageMapBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.numUsedContainers_ = numUsedContainers_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.numReservedContainers_ = numReservedContainers_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.usedResources_ = usedResourcesBuilder_ == null
              ? usedResources_
              : usedResourcesBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.reservedResources_ = reservedResourcesBuilder_ == null
              ? reservedResources_
              : reservedResourcesBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.neededResources_ = neededResourcesBuilder_ == null
              ? neededResources_
              : neededResourcesBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.memorySeconds_ = memorySeconds_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.vcoreSeconds_ = vcoreSeconds_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.queueUsagePercentage_ = queueUsagePercentage_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.clusterUsagePercentage_ = clusterUsagePercentage_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.preemptedMemorySeconds_ = preemptedMemorySeconds_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.preemptedVcoreSeconds_ = preemptedVcoreSeconds_;
          to_bitField0_ |= 0x00000400;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance()) return this;
        if (other.hasNumUsedContainers()) {
          setNumUsedContainers(other.getNumUsedContainers());
        }
        if (other.hasNumReservedContainers()) {
          setNumReservedContainers(other.getNumReservedContainers());
        }
        if (other.hasUsedResources()) {
          mergeUsedResources(other.getUsedResources());
        }
        if (other.hasReservedResources()) {
          mergeReservedResources(other.getReservedResources());
        }
        if (other.hasNeededResources()) {
          mergeNeededResources(other.getNeededResources());
        }
        if (other.hasMemorySeconds()) {
          setMemorySeconds(other.getMemorySeconds());
        }
        if (other.hasVcoreSeconds()) {
          setVcoreSeconds(other.getVcoreSeconds());
        }
        if (other.hasQueueUsagePercentage()) {
          setQueueUsagePercentage(other.getQueueUsagePercentage());
        }
        if (other.hasClusterUsagePercentage()) {
          setClusterUsagePercentage(other.getClusterUsagePercentage());
        }
        if (other.hasPreemptedMemorySeconds()) {
          setPreemptedMemorySeconds(other.getPreemptedMemorySeconds());
        }
        if (other.hasPreemptedVcoreSeconds()) {
          setPreemptedVcoreSeconds(other.getPreemptedVcoreSeconds());
        }
        if (applicationResourceUsageMapBuilder_ == null) {
          if (!other.applicationResourceUsageMap_.isEmpty()) {
            if (applicationResourceUsageMap_.isEmpty()) {
              applicationResourceUsageMap_ = other.applicationResourceUsageMap_;
              bitField0_ = (bitField0_ & ~0x00000800);
            } else {
              ensureApplicationResourceUsageMapIsMutable();
              applicationResourceUsageMap_.addAll(other.applicationResourceUsageMap_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationResourceUsageMap_.isEmpty()) {
            if (applicationResourceUsageMapBuilder_.isEmpty()) {
              applicationResourceUsageMapBuilder_.dispose();
              applicationResourceUsageMapBuilder_ = null;
              applicationResourceUsageMap_ = other.applicationResourceUsageMap_;
              bitField0_ = (bitField0_ & ~0x00000800);
              applicationResourceUsageMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationResourceUsageMapFieldBuilder() : null;
            } else {
              applicationResourceUsageMapBuilder_.addAllMessages(other.applicationResourceUsageMap_);
            }
          }
        }
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          if (!other.applicationPreemptedResourceUsageMap_.isEmpty()) {
            if (applicationPreemptedResourceUsageMap_.isEmpty()) {
              applicationPreemptedResourceUsageMap_ = other.applicationPreemptedResourceUsageMap_;
              bitField0_ = (bitField0_ & ~0x00001000);
            } else {
              ensureApplicationPreemptedResourceUsageMapIsMutable();
              applicationPreemptedResourceUsageMap_.addAll(other.applicationPreemptedResourceUsageMap_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationPreemptedResourceUsageMap_.isEmpty()) {
            if (applicationPreemptedResourceUsageMapBuilder_.isEmpty()) {
              applicationPreemptedResourceUsageMapBuilder_.dispose();
              applicationPreemptedResourceUsageMapBuilder_ = null;
              applicationPreemptedResourceUsageMap_ = other.applicationPreemptedResourceUsageMap_;
              bitField0_ = (bitField0_ & ~0x00001000);
              applicationPreemptedResourceUsageMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationPreemptedResourceUsageMapFieldBuilder() : null;
            } else {
              applicationPreemptedResourceUsageMapBuilder_.addAllMessages(other.applicationPreemptedResourceUsageMap_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasUsedResources()) {
          if (!getUsedResources().isInitialized()) {
            return false;
          }
        }
        if (hasReservedResources()) {
          if (!getReservedResources().isInitialized()) {
            return false;
          }
        }
        if (hasNeededResources()) {
          if (!getNeededResources().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getApplicationResourceUsageMapCount(); i++) {
          if (!getApplicationResourceUsageMap(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getApplicationPreemptedResourceUsageMapCount(); i++) {
          if (!getApplicationPreemptedResourceUsageMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

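      // The merge loop below dispatches on the raw protobuf tag, where
      // tag = (field_number << 3) | wire_type: case 8 is field 1 as a varint
      // (1<<3 | 0), case 26 is field 3 as a length-delimited message
      // (3<<3 | 2), and case 69 is field 8 as a 32-bit float (8<<3 | 5).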
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                numUsedContainers_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                numReservedContainers_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                input.readMessage(
                    getUsedResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getReservedResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getNeededResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                memorySeconds_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                vcoreSeconds_ = input.readInt64();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 69: {
                queueUsagePercentage_ = input.readFloat();
                bitField0_ |= 0x00000080;
                break;
              } // case 69
              case 77: {
                clusterUsagePercentage_ = input.readFloat();
                bitField0_ |= 0x00000100;
                break;
              } // case 77
              case 80: {
                preemptedMemorySeconds_ = input.readInt64();
                bitField0_ |= 0x00000200;
                break;
              } // case 80
              case 88: {
                preemptedVcoreSeconds_ = input.readInt64();
                bitField0_ |= 0x00000400;
                break;
              } // case 88
              case 98: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.PARSER,
                        extensionRegistry);
                if (applicationResourceUsageMapBuilder_ == null) {
                  ensureApplicationResourceUsageMapIsMutable();
                  applicationResourceUsageMap_.add(m);
                } else {
                  applicationResourceUsageMapBuilder_.addMessage(m);
                }
                break;
              } // case 98
              case 106: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.PARSER,
                        extensionRegistry);
                if (applicationPreemptedResourceUsageMapBuilder_ == null) {
                  ensureApplicationPreemptedResourceUsageMapIsMutable();
                  applicationPreemptedResourceUsageMap_.add(m);
                } else {
                  applicationPreemptedResourceUsageMapBuilder_.addMessage(m);
                }
                break;
              } // case 106
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
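
      // Each tag handled in mergeFrom above encodes (field_number << 3) | wire_type:
      // field 1 as a varint gives (1 << 3) | 0 = 8, field 3 as a
      // length-delimited message gives (3 << 3) | 2 = 26, and field 8 as a
      // fixed 32-bit float gives (8 << 3) | 5 = 69, matching the case labels
      // above. A minimal round-trip sketch, assuming `bytes` is a hypothetical
      // local holding a serialized report:
      //
      //   ApplicationResourceUsageReportProto.Builder b =
      //       ApplicationResourceUsageReportProto.newBuilder();
      //   b.mergeFrom(
      //       org.apache.hadoop.thirdparty.protobuf.CodedInputStream.newInstance(bytes),
      //       org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite.getEmptyRegistry());
      //   ApplicationResourceUsageReportProto report = b.build();
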
      private int bitField0_;

      private int numUsedContainers_ ;
      /**
       * <code>optional int32 num_used_containers = 1;</code>
       * @return Whether the numUsedContainers field is set.
       */
      @java.lang.Override
      public boolean hasNumUsedContainers() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 num_used_containers = 1;</code>
       * @return The numUsedContainers.
       */
      @java.lang.Override
      public int getNumUsedContainers() {
        return numUsedContainers_;
      }
      /**
       * <code>optional int32 num_used_containers = 1;</code>
       * @param value The numUsedContainers to set.
       * @return This builder for chaining.
       */
      public Builder setNumUsedContainers(int value) {
        numUsedContainers_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_used_containers = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumUsedContainers() {
        bitField0_ = (bitField0_ & ~0x00000001);
        numUsedContainers_ = 0;
        onChanged();
        return this;
      }
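
      // The optional scalar accessors follow one pattern throughout this
      // builder: hasX() reads a presence bit in bitField0_, setX(...) stores
      // the value and raises the bit, and clearX() resets both. A short
      // sketch of the contract:
      //
      //   Builder b = ApplicationResourceUsageReportProto.newBuilder();
      //   assert !b.hasNumUsedContainers();
      //   b.setNumUsedContainers(4);   // raises presence bit 0x00000001
      //   assert b.hasNumUsedContainers() && b.getNumUsedContainers() == 4;
      //   b.clearNumUsedContainers();  // back to unset, value reads as 0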

      private int numReservedContainers_ ;
      /**
       * <code>optional int32 num_reserved_containers = 2;</code>
       * @return Whether the numReservedContainers field is set.
       */
      @java.lang.Override
      public boolean hasNumReservedContainers() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 num_reserved_containers = 2;</code>
       * @return The numReservedContainers.
       */
      @java.lang.Override
      public int getNumReservedContainers() {
        return numReservedContainers_;
      }
      /**
       * <code>optional int32 num_reserved_containers = 2;</code>
       * @param value The numReservedContainers to set.
       * @return This builder for chaining.
       */
      public Builder setNumReservedContainers(int value) {
        numReservedContainers_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_reserved_containers = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumReservedContainers() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numReservedContainers_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto usedResources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> usedResourcesBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       * @return Whether the usedResources field is set.
       */
      public boolean hasUsedResources() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       * @return The usedResources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsedResources() {
        if (usedResourcesBuilder_ == null) {
          return usedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
        } else {
          return usedResourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public Builder setUsedResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (usedResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          usedResources_ = value;
        } else {
          usedResourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public Builder setUsedResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (usedResourcesBuilder_ == null) {
          usedResources_ = builderForValue.build();
        } else {
          usedResourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public Builder mergeUsedResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (usedResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            usedResources_ != null &&
            usedResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getUsedResourcesBuilder().mergeFrom(value);
          } else {
            usedResources_ = value;
          }
        } else {
          usedResourcesBuilder_.mergeFrom(value);
        }
        if (usedResources_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public Builder clearUsedResources() {
        bitField0_ = (bitField0_ & ~0x00000004);
        usedResources_ = null;
        if (usedResourcesBuilder_ != null) {
          usedResourcesBuilder_.dispose();
          usedResourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getUsedResourcesBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getUsedResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedResourcesOrBuilder() {
        if (usedResourcesBuilder_ != null) {
          return usedResourcesBuilder_.getMessageOrBuilder();
        } else {
          return usedResources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : usedResources_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used_resources = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getUsedResourcesFieldBuilder() {
        if (usedResourcesBuilder_ == null) {
          usedResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getUsedResources(),
                  getParentForChildren(),
                  isClean());
          usedResources_ = null;
        }
        return usedResourcesBuilder_;
      }
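
      // Singular message fields such as used_resources accept either a
      // finished ResourceProto via setUsedResources(...) or in-place mutation
      // through getUsedResourcesBuilder(); the SingleFieldBuilderV3 above
      // keeps the two representations in sync, and mergeUsedResources(...)
      // combines an incoming value field-by-field with what is already set.
      // A sketch, assuming the memory/vcore setters generated for
      // ResourceProto elsewhere in this file:
      //
      //   builder.setUsedResources(
      //       ResourceProto.newBuilder().setMemory(2048).setVirtualCores(2).build());
      //   builder.getUsedResourcesBuilder().setVirtualCores(4); // mutate in place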

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto reservedResources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> reservedResourcesBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       * @return Whether the reservedResources field is set.
       */
      public boolean hasReservedResources() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       * @return The reservedResources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getReservedResources() {
        if (reservedResourcesBuilder_ == null) {
          return reservedResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
        } else {
          return reservedResourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public Builder setReservedResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (reservedResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservedResources_ = value;
        } else {
          reservedResourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public Builder setReservedResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (reservedResourcesBuilder_ == null) {
          reservedResources_ = builderForValue.build();
        } else {
          reservedResourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public Builder mergeReservedResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (reservedResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            reservedResources_ != null &&
            reservedResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getReservedResourcesBuilder().mergeFrom(value);
          } else {
            reservedResources_ = value;
          }
        } else {
          reservedResourcesBuilder_.mergeFrom(value);
        }
        if (reservedResources_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public Builder clearReservedResources() {
        bitField0_ = (bitField0_ & ~0x00000008);
        reservedResources_ = null;
        if (reservedResourcesBuilder_ != null) {
          reservedResourcesBuilder_.dispose();
          reservedResourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getReservedResourcesBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getReservedResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getReservedResourcesOrBuilder() {
        if (reservedResourcesBuilder_ != null) {
          return reservedResourcesBuilder_.getMessageOrBuilder();
        } else {
          return reservedResources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : reservedResources_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto reserved_resources = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getReservedResourcesFieldBuilder() {
        if (reservedResourcesBuilder_ == null) {
          reservedResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getReservedResources(),
                  getParentForChildren(),
                  isClean());
          reservedResources_ = null;
        }
        return reservedResourcesBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto neededResources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> neededResourcesBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       * @return Whether the neededResources field is set.
       */
      public boolean hasNeededResources() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       * @return The neededResources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getNeededResources() {
        if (neededResourcesBuilder_ == null) {
          return neededResources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
        } else {
          return neededResourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public Builder setNeededResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (neededResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          neededResources_ = value;
        } else {
          neededResourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public Builder setNeededResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (neededResourcesBuilder_ == null) {
          neededResources_ = builderForValue.build();
        } else {
          neededResourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public Builder mergeNeededResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (neededResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            neededResources_ != null &&
            neededResources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getNeededResourcesBuilder().mergeFrom(value);
          } else {
            neededResources_ = value;
          }
        } else {
          neededResourcesBuilder_.mergeFrom(value);
        }
        if (neededResources_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public Builder clearNeededResources() {
        bitField0_ = (bitField0_ & ~0x00000010);
        neededResources_ = null;
        if (neededResourcesBuilder_ != null) {
          neededResourcesBuilder_.dispose();
          neededResourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getNeededResourcesBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getNeededResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getNeededResourcesOrBuilder() {
        if (neededResourcesBuilder_ != null) {
          return neededResourcesBuilder_.getMessageOrBuilder();
        } else {
          return neededResources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : neededResources_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto needed_resources = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getNeededResourcesFieldBuilder() {
        if (neededResourcesBuilder_ == null) {
          neededResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getNeededResources(),
                  getParentForChildren(),
                  isClean());
          neededResources_ = null;
        }
        return neededResourcesBuilder_;
      }

      private long memorySeconds_ ;
      /**
       * <code>optional int64 memory_seconds = 6;</code>
       * @return Whether the memorySeconds field is set.
       */
      @java.lang.Override
      public boolean hasMemorySeconds() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int64 memory_seconds = 6;</code>
       * @return The memorySeconds.
       */
      @java.lang.Override
      public long getMemorySeconds() {
        return memorySeconds_;
      }
      /**
       * <code>optional int64 memory_seconds = 6;</code>
       * @param value The memorySeconds to set.
       * @return This builder for chaining.
       */
      public Builder setMemorySeconds(long value) {
        memorySeconds_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 memory_seconds = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearMemorySeconds() {
        bitField0_ = (bitField0_ & ~0x00000020);
        memorySeconds_ = 0L;
        onChanged();
        return this;
      }
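
      // memory_seconds above and vcore_seconds below are cumulative usage
      // integrals as surfaced in YARN's ApplicationResourceUsageReport:
      // roughly the MB of memory (respectively, vcores) an application held,
      // multiplied by the seconds it held them. For example, 2048 MB held for
      // 60 seconds contributes 2048 * 60 = 122880 memory-seconds. The
      // preempted_* fields further below count only preempted containers.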

      private long vcoreSeconds_ ;
      /**
       * <code>optional int64 vcore_seconds = 7;</code>
       * @return Whether the vcoreSeconds field is set.
       */
      @java.lang.Override
      public boolean hasVcoreSeconds() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int64 vcore_seconds = 7;</code>
       * @return The vcoreSeconds.
       */
      @java.lang.Override
      public long getVcoreSeconds() {
        return vcoreSeconds_;
      }
      /**
       * <code>optional int64 vcore_seconds = 7;</code>
       * @param value The vcoreSeconds to set.
       * @return This builder for chaining.
       */
      public Builder setVcoreSeconds(long value) {
        vcoreSeconds_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 vcore_seconds = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearVcoreSeconds() {
        bitField0_ = (bitField0_ & ~0x00000040);
        vcoreSeconds_ = 0L;
        onChanged();
        return this;
      }

      private float queueUsagePercentage_ ;
      /**
       * <code>optional float queue_usage_percentage = 8;</code>
       * @return Whether the queueUsagePercentage field is set.
       */
      @java.lang.Override
      public boolean hasQueueUsagePercentage() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional float queue_usage_percentage = 8;</code>
       * @return The queueUsagePercentage.
       */
      @java.lang.Override
      public float getQueueUsagePercentage() {
        return queueUsagePercentage_;
      }
      /**
       * <code>optional float queue_usage_percentage = 8;</code>
       * @param value The queueUsagePercentage to set.
       * @return This builder for chaining.
       */
      public Builder setQueueUsagePercentage(float value) {
        queueUsagePercentage_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional float queue_usage_percentage = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueueUsagePercentage() {
        bitField0_ = (bitField0_ & ~0x00000080);
        queueUsagePercentage_ = 0F;
        onChanged();
        return this;
      }

      private float clusterUsagePercentage_ ;
      /**
       * <code>optional float cluster_usage_percentage = 9;</code>
       * @return Whether the clusterUsagePercentage field is set.
       */
      @java.lang.Override
      public boolean hasClusterUsagePercentage() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional float cluster_usage_percentage = 9;</code>
       * @return The clusterUsagePercentage.
       */
      @java.lang.Override
      public float getClusterUsagePercentage() {
        return clusterUsagePercentage_;
      }
      /**
       * <code>optional float cluster_usage_percentage = 9;</code>
       * @param value The clusterUsagePercentage to set.
       * @return This builder for chaining.
       */
      public Builder setClusterUsagePercentage(float value) {
        clusterUsagePercentage_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional float cluster_usage_percentage = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearClusterUsagePercentage() {
        bitField0_ = (bitField0_ & ~0x00000100);
        clusterUsagePercentage_ = 0F;
        onChanged();
        return this;
      }

      private long preemptedMemorySeconds_ ;
      /**
       * <code>optional int64 preempted_memory_seconds = 10;</code>
       * @return Whether the preemptedMemorySeconds field is set.
       */
      @java.lang.Override
      public boolean hasPreemptedMemorySeconds() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional int64 preempted_memory_seconds = 10;</code>
       * @return The preemptedMemorySeconds.
       */
      @java.lang.Override
      public long getPreemptedMemorySeconds() {
        return preemptedMemorySeconds_;
      }
      /**
       * <code>optional int64 preempted_memory_seconds = 10;</code>
       * @param value The preemptedMemorySeconds to set.
       * @return This builder for chaining.
       */
      public Builder setPreemptedMemorySeconds(long value) {
        preemptedMemorySeconds_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 preempted_memory_seconds = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearPreemptedMemorySeconds() {
        bitField0_ = (bitField0_ & ~0x00000200);
        preemptedMemorySeconds_ = 0L;
        onChanged();
        return this;
      }

      private long preemptedVcoreSeconds_ ;
      /**
       * <code>optional int64 preempted_vcore_seconds = 11;</code>
       * @return Whether the preemptedVcoreSeconds field is set.
       */
      @java.lang.Override
      public boolean hasPreemptedVcoreSeconds() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional int64 preempted_vcore_seconds = 11;</code>
       * @return The preemptedVcoreSeconds.
       */
      @java.lang.Override
      public long getPreemptedVcoreSeconds() {
        return preemptedVcoreSeconds_;
      }
      /**
       * <code>optional int64 preempted_vcore_seconds = 11;</code>
       * @param value The preemptedVcoreSeconds to set.
       * @return This builder for chaining.
       */
      public Builder setPreemptedVcoreSeconds(long value) {
        preemptedVcoreSeconds_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 preempted_vcore_seconds = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearPreemptedVcoreSeconds() {
        bitField0_ = (bitField0_ & ~0x00000400);
        preemptedVcoreSeconds_ = 0L;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationResourceUsageMap_ =
        java.util.Collections.emptyList();
      private void ensureApplicationResourceUsageMapIsMutable() {
        if (!((bitField0_ & 0x00000800) != 0)) {
          applicationResourceUsageMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto>(applicationResourceUsageMap_);
          bitField0_ |= 0x00000800;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> applicationResourceUsageMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationResourceUsageMapList() {
        if (applicationResourceUsageMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationResourceUsageMap_);
        } else {
          return applicationResourceUsageMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public int getApplicationResourceUsageMapCount() {
        if (applicationResourceUsageMapBuilder_ == null) {
          return applicationResourceUsageMap_.size();
        } else {
          return applicationResourceUsageMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationResourceUsageMap(int index) {
        if (applicationResourceUsageMapBuilder_ == null) {
          return applicationResourceUsageMap_.get(index);
        } else {
          return applicationResourceUsageMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder setApplicationResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.set(index, value);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder setApplicationResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationResourceUsageMapBuilder_ == null) {
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder addApplicationResourceUsageMap(org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.add(value);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder addApplicationResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.add(index, value);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder addApplicationResourceUsageMap(
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationResourceUsageMapBuilder_ == null) {
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.add(builderForValue.build());
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder addApplicationResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationResourceUsageMapBuilder_ == null) {
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder addAllApplicationResourceUsageMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> values) {
        if (applicationResourceUsageMapBuilder_ == null) {
          ensureApplicationResourceUsageMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationResourceUsageMap_);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder clearApplicationResourceUsageMap() {
        if (applicationResourceUsageMapBuilder_ == null) {
          applicationResourceUsageMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000800);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public Builder removeApplicationResourceUsageMap(int index) {
        if (applicationResourceUsageMapBuilder_ == null) {
          ensureApplicationResourceUsageMapIsMutable();
          applicationResourceUsageMap_.remove(index);
          onChanged();
        } else {
          applicationResourceUsageMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder getApplicationResourceUsageMapBuilder(
          int index) {
        return getApplicationResourceUsageMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationResourceUsageMapOrBuilder(
          int index) {
        if (applicationResourceUsageMapBuilder_ == null) {
          return applicationResourceUsageMap_.get(index);
        } else {
          return applicationResourceUsageMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
           getApplicationResourceUsageMapOrBuilderList() {
        if (applicationResourceUsageMapBuilder_ != null) {
          return applicationResourceUsageMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationResourceUsageMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationResourceUsageMapBuilder() {
        return getApplicationResourceUsageMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationResourceUsageMapBuilder(
          int index) {
        return getApplicationResourceUsageMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_resource_usage_map = 12;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder> 
           getApplicationResourceUsageMapBuilderList() {
        return getApplicationResourceUsageMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
          getApplicationResourceUsageMapFieldBuilder() {
        if (applicationResourceUsageMapBuilder_ == null) {
          applicationResourceUsageMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder>(
                  applicationResourceUsageMap_,
                  ((bitField0_ & 0x00000800) != 0),
                  getParentForChildren(),
                  isClean());
          applicationResourceUsageMap_ = null;
        }
        return applicationResourceUsageMapBuilder_;
      }
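
      // Repeated message fields keep an immutable-by-default backing list
      // that is copied on first write (ensureApplicationResourceUsageMapIsMutable)
      // and handed off to the RepeatedFieldBuilderV3 once nested builders are
      // requested. A minimal sketch, assuming the key/value setters generated
      // for StringLongMapProto elsewhere in this file:
      //
      //   builder.addApplicationResourceUsageMap(
      //       StringLongMapProto.newBuilder()
      //           .setKey("memory-mb").setValue(8192L).build());
      //   // getApplicationResourceUsageMapList() is read-only; mutate only
      //   // through the add/set/remove methods above.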

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> applicationPreemptedResourceUsageMap_ =
        java.util.Collections.emptyList();
      private void ensureApplicationPreemptedResourceUsageMapIsMutable() {
        if (!((bitField0_ & 0x00001000) != 0)) {
          applicationPreemptedResourceUsageMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto>(applicationPreemptedResourceUsageMap_);
          bitField0_ |= 0x00001000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> applicationPreemptedResourceUsageMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> getApplicationPreemptedResourceUsageMapList() {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_);
        } else {
          return applicationPreemptedResourceUsageMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public int getApplicationPreemptedResourceUsageMapCount() {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          return applicationPreemptedResourceUsageMap_.size();
        } else {
          return applicationPreemptedResourceUsageMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto getApplicationPreemptedResourceUsageMap(int index) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          return applicationPreemptedResourceUsageMap_.get(index);
        } else {
          return applicationPreemptedResourceUsageMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder setApplicationPreemptedResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.set(index, value);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder setApplicationPreemptedResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder addApplicationPreemptedResourceUsageMap(org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.add(value);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder addApplicationPreemptedResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto value) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.add(index, value);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder addApplicationPreemptedResourceUsageMap(
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.add(builderForValue.build());
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder addApplicationPreemptedResourceUsageMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder builderForValue) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder addAllApplicationPreemptedResourceUsageMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto> values) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationPreemptedResourceUsageMap_);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder clearApplicationPreemptedResourceUsageMap() {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          applicationPreemptedResourceUsageMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00001000);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public Builder removeApplicationPreemptedResourceUsageMap(int index) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          ensureApplicationPreemptedResourceUsageMapIsMutable();
          applicationPreemptedResourceUsageMap_.remove(index);
          onChanged();
        } else {
          applicationPreemptedResourceUsageMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder getApplicationPreemptedResourceUsageMapBuilder(
          int index) {
        return getApplicationPreemptedResourceUsageMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder getApplicationPreemptedResourceUsageMapOrBuilder(
          int index) {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          return applicationPreemptedResourceUsageMap_.get(index);
        } else {
          return applicationPreemptedResourceUsageMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
           getApplicationPreemptedResourceUsageMapOrBuilderList() {
        if (applicationPreemptedResourceUsageMapBuilder_ != null) {
          return applicationPreemptedResourceUsageMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationPreemptedResourceUsageMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationPreemptedResourceUsageMapBuilder() {
        return getApplicationPreemptedResourceUsageMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder addApplicationPreemptedResourceUsageMapBuilder(
          int index) {
        return getApplicationPreemptedResourceUsageMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLongMapProto application_preempted_resource_usage_map = 13;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder> 
           getApplicationPreemptedResourceUsageMapBuilderList() {
        return getApplicationPreemptedResourceUsageMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder> 
          getApplicationPreemptedResourceUsageMapFieldBuilder() {
        if (applicationPreemptedResourceUsageMapBuilder_ == null) {
          applicationPreemptedResourceUsageMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLongMapProtoOrBuilder>(
                  applicationPreemptedResourceUsageMap_,
                  ((bitField0_ & 0x00001000) != 0),
                  getParentForChildren(),
                  isClean());
          applicationPreemptedResourceUsageMap_ = null;
        }
        return applicationPreemptedResourceUsageMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationResourceUsageReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationResourceUsageReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationResourceUsageReportProto>() {
      @java.lang.Override
      public ApplicationResourceUsageReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
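
    // PARSER is kept (deprecated) for older callers; new code is expected to
    // obtain it through parser() below or the usual generated parseFrom
    // overloads. A sketch, assuming `bytes` holds a serialized report:
    //
    //   ApplicationResourceUsageReportProto r =
    //       ApplicationResourceUsageReportProto.parser().parseFrom(bytes);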

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationResourceUsageReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ApplicationReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();
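
    // This OrBuilder view lets read-only code accept either a built
    // ApplicationReportProto or its Builder. A hedged sketch of a caller
    // (the helper name is hypothetical):
    //
    //   static String describe(ApplicationReportProtoOrBuilder r) {
    //     return r.hasApplicationId() ? r.getApplicationId().toString() : "<unset>";
    //   }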

    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    boolean hasUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * <code>optional string queue = 3;</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 3;</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 3;</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>optional string name = 4;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>optional string name = 4;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>optional string name = 4;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * <code>optional string host = 5;</code>
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * <code>optional string host = 5;</code>
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 5;</code>
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 rpc_port = 6;</code>
     * @return Whether the rpcPort field is set.
     */
    boolean hasRpcPort();
    /**
     * <code>optional int32 rpc_port = 6;</code>
     * @return The rpcPort.
     */
    int getRpcPort();

    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     * @return Whether the clientToAmToken field is set.
     */
    boolean hasClientToAmToken();
    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     * @return The clientToAmToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken();
    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder();

    /**
     * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
     * @return Whether the yarnApplicationState field is set.
     */
    boolean hasYarnApplicationState();
    /**
     * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
     * @return The yarnApplicationState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState();

    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return Whether the trackingUrl field is set.
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return The trackingUrl.
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return The bytes for trackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();

    /**
     * <code>optional int64 startTime = 11;</code>
     * @return Whether the startTime field is set.
     */
    boolean hasStartTime();
    /**
     * <code>optional int64 startTime = 11;</code>
     * @return The startTime.
     */
    long getStartTime();

    /**
     * <code>optional int64 finishTime = 12;</code>
     * @return Whether the finishTime field is set.
     */
    boolean hasFinishTime();
    /**
     * <code>optional int64 finishTime = 12;</code>
     * @return The finishTime.
     */
    long getFinishTime();

    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
     * @return Whether the finalApplicationStatus field is set.
     */
    boolean hasFinalApplicationStatus();
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
     * @return The finalApplicationStatus.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus();

    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     * @return Whether the appResourceUsage field is set.
     */
    boolean hasAppResourceUsage();
    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     * @return The appResourceUsage.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage();
    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder();

    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return Whether the originalTrackingUrl field is set.
     */
    boolean hasOriginalTrackingUrl();
    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return The originalTrackingUrl.
     */
    java.lang.String getOriginalTrackingUrl();
    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return The bytes for originalTrackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getOriginalTrackingUrlBytes();

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     * @return Whether the currentApplicationAttemptId field is set.
     */
    boolean hasCurrentApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     * @return The currentApplicationAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder();

    /**
     * <code>optional float progress = 17;</code>
     * @return Whether the progress field is set.
     */
    boolean hasProgress();
    /**
     * <code>optional float progress = 17;</code>
     * @return The progress.
     */
    float getProgress();

    /**
     * <code>optional string applicationType = 18;</code>
     * @return Whether the applicationType field is set.
     */
    boolean hasApplicationType();
    /**
     * <code>optional string applicationType = 18;</code>
     * @return The applicationType.
     */
    java.lang.String getApplicationType();
    /**
     * <code>optional string applicationType = 18;</code>
     * @return The bytes for applicationType.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes();

    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     * @return Whether the amRmToken field is set.
     */
    boolean hasAmRmToken();
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     * @return The amRmToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken();
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder();

    /**
     * <code>repeated string applicationTags = 20;</code>
     * @return A list containing the applicationTags.
     */
    java.util.List<java.lang.String>
        getApplicationTagsList();
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @return The count of applicationTags.
     */
    int getApplicationTagsCount();
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    java.lang.String getApplicationTags(int index);
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index);

    /**
     * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
     * @return Whether the logAggregationStatus field is set.
     */
    boolean hasLogAggregationStatus();
    /**
     * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
     * @return The logAggregationStatus.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus();

    /**
     * <code>optional bool unmanaged_application = 22 [default = false];</code>
     * @return Whether the unmanagedApplication field is set.
     */
    boolean hasUnmanagedApplication();
    /**
     * <code>optional bool unmanaged_application = 22 [default = false];</code>
     * @return The unmanagedApplication.
     */
    boolean getUnmanagedApplication();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return Whether the appNodeLabelExpression field is set.
     */
    boolean hasAppNodeLabelExpression();
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return The appNodeLabelExpression.
     */
    java.lang.String getAppNodeLabelExpression();
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return The bytes for appNodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAppNodeLabelExpressionBytes();

    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return Whether the amNodeLabelExpression field is set.
     */
    boolean hasAmNodeLabelExpression();
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The amNodeLabelExpression.
     */
    java.lang.String getAmNodeLabelExpression();
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The bytes for amNodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAmNodeLabelExpressionBytes();

    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> 
        getAppTimeoutsList();
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index);
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    int getAppTimeoutsCount();
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> 
        getAppTimeoutsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder(
        int index);

    /**
     * <code>optional int64 launchTime = 27;</code>
     * @return Whether the launchTime field is set.
     */
    boolean hasLaunchTime();
    /**
     * <code>optional int64 launchTime = 27;</code>
     * @return The launchTime.
     */
    long getLaunchTime();

    /**
     * <code>optional int64 submitTime = 28;</code>
     * @return Whether the submitTime field is set.
     */
    boolean hasSubmitTime();
    /**
     * <code>optional int64 submitTime = 28;</code>
     * @return The submitTime.
     */
    long getSubmitTime();

    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return Whether the rmClusterId field is set.
     */
    boolean hasRmClusterId();
    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return The rmClusterId.
     */
    java.lang.String getRmClusterId();
    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return The bytes for rmClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRmClusterIdBytes();
  }
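  /*
   * Read-side sketch against the interface above; the variable names and the
   * "default" fallback are illustrative only, not part of the generated API:
   *
   *   ApplicationReportProtoOrBuilder report =
   *       ApplicationReportProto.getDefaultInstance();
   *   String queue = report.hasQueue() ? report.getQueue() : "default";
   *   for (String tag : report.getApplicationTagsList()) {
   *     // repeated fields expose list/count/index accessors rather than a
   *     // has-method; an unset repeated field is simply an empty list
   *   }
   */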
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationReportProto}
   */
  public static final class ApplicationReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationReportProto)
      ApplicationReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationReportProto.newBuilder() to construct.
    private ApplicationReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationReportProto() {
      user_ = "";
      queue_ = "";
      name_ = "";
      host_ = "";
      yarnApplicationState_ = 1;
      trackingUrl_ = "";
      diagnostics_ = "N/A";
      finalApplicationStatus_ = 0;
      originalTrackingUrl_ = "";
      applicationType_ = "";
      applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      logAggregationStatus_ = 1;
      appNodeLabelExpression_ = "";
      amNodeLabelExpression_ = "";
      appTimeouts_ = java.util.Collections.emptyList();
      rmClusterId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationReportProto();
    }
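    /*
     * The no-arg constructor seeds the proto2 defaults declared in
     * yarn_protos.proto: diagnostics defaults to "N/A", yarnApplicationState_
     * and logAggregationStatus_ to raw number 1 (their first declared
     * constants), finalApplicationStatus_ to 0 (APP_UNDEFINED), and every
     * string field to "".
     */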

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATIONID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
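    /*
     * Presence tracking: each optional field owns one bit of bitField0_ in
     * declaration order (applicationId -> 0x1, user -> 0x2, ...,
     * rmClusterId -> 0x04000000; repeated fields take no bit), so a
     * has-check is a single mask test.  Message-typed getters return the
     * type's shared default instance while the bit is clear, which keeps
     * chained reads off an unset field null-safe.
     */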

    public static final int USER_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    @java.lang.Override
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
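    /*
     * Lazy UTF-8 decoding: user_ holds either a String or a ByteString.
     * The first getUser() after parsing decodes the bytes once and, if they
     * were valid UTF-8, caches the String back into the volatile field;
     * getUserBytes() caches in the opposite direction.  The unsynchronized
     * write is benign because both representations are immutable, so racing
     * threads can only publish equivalent values.  The same pattern repeats
     * for every string field below.
     */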

    public static final int QUEUE_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "";
    /**
     * <code>optional string queue = 3;</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string queue = 3;</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 3;</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int NAME_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>optional string name = 4;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string name = 4;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string name = 4;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int HOST_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object host_ = "";
    /**
     * <code>optional string host = 5;</code>
     * @return Whether the host field is set.
     */
    @java.lang.Override
    public boolean hasHost() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string host = 5;</code>
     * @return The host.
     */
    @java.lang.Override
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string host = 5;</code>
     * @return The bytes for host.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RPC_PORT_FIELD_NUMBER = 6;
    private int rpcPort_ = 0;
    /**
     * <code>optional int32 rpc_port = 6;</code>
     * @return Whether the rpcPort field is set.
     */
    @java.lang.Override
    public boolean hasRpcPort() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int32 rpc_port = 6;</code>
     * @return The rpcPort.
     */
    @java.lang.Override
    public int getRpcPort() {
      return rpcPort_;
    }

    public static final int CLIENT_TO_AM_TOKEN_FIELD_NUMBER = 7;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto clientToAmToken_;
    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     * @return Whether the clientToAmToken field is set.
     */
    @java.lang.Override
    public boolean hasClientToAmToken() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     * @return The clientToAmToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken() {
      return clientToAmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder() {
      return clientToAmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_;
    }

    public static final int YARN_APPLICATION_STATE_FIELD_NUMBER = 8;
    private int yarnApplicationState_ = 1;
    /**
     * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
     * @return Whether the yarnApplicationState field is set.
     */
    @java.lang.Override public boolean hasYarnApplicationState() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
     * @return The yarnApplicationState.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(yarnApplicationState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
    }
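    /*
     * Enum fields are stored as their raw wire number; the getter resolves
     * it with forNumber(...) and falls back to NEW (the first declared
     * constant) if the stored number matches no known value.
     */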

    public static final int TRACKINGURL_FIELD_NUMBER = 9;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trackingUrl_ = "";
    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return Whether the trackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return The trackingUrl.
     */
    @java.lang.Override
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string trackingUrl = 9;</code>
     * @return The bytes for trackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DIAGNOSTICS_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "N/A";
    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 10 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int STARTTIME_FIELD_NUMBER = 11;
    private long startTime_ = 0L;
    /**
     * <code>optional int64 startTime = 11;</code>
     * @return Whether the startTime field is set.
     */
    @java.lang.Override
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional int64 startTime = 11;</code>
     * @return The startTime.
     */
    @java.lang.Override
    public long getStartTime() {
      return startTime_;
    }

    public static final int FINISHTIME_FIELD_NUMBER = 12;
    private long finishTime_ = 0L;
    /**
     * <code>optional int64 finishTime = 12;</code>
     * @return Whether the finishTime field is set.
     */
    @java.lang.Override
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional int64 finishTime = 12;</code>
     * @return The finishTime.
     */
    @java.lang.Override
    public long getFinishTime() {
      return finishTime_;
    }

    public static final int FINAL_APPLICATION_STATUS_FIELD_NUMBER = 13;
    private int finalApplicationStatus_ = 0;
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
     * @return Whether the finalApplicationStatus field is set.
     */
    @java.lang.Override public boolean hasFinalApplicationStatus() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
     * @return The finalApplicationStatus.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
      org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
    }

    public static final int APP_RESOURCE_USAGE_FIELD_NUMBER = 14;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto appResourceUsage_;
    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     * @return Whether the appResourceUsage field is set.
     */
    @java.lang.Override
    public boolean hasAppResourceUsage() {
      return ((bitField0_ & 0x00002000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     * @return The appResourceUsage.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage() {
      return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder() {
      return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_;
    }

    public static final int ORIGINALTRACKINGURL_FIELD_NUMBER = 15;
    @SuppressWarnings("serial")
    private volatile java.lang.Object originalTrackingUrl_ = "";
    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return Whether the originalTrackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasOriginalTrackingUrl() {
      return ((bitField0_ & 0x00004000) != 0);
    }
    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return The originalTrackingUrl.
     */
    @java.lang.Override
    public java.lang.String getOriginalTrackingUrl() {
      java.lang.Object ref = originalTrackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          originalTrackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string originalTrackingUrl = 15;</code>
     * @return The bytes for originalTrackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getOriginalTrackingUrlBytes() {
      java.lang.Object ref = originalTrackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        originalTrackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CURRENTAPPLICATIONATTEMPTID_FIELD_NUMBER = 16;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto currentApplicationAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     * @return Whether the currentApplicationAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasCurrentApplicationAttemptId() {
      return ((bitField0_ & 0x00008000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     * @return The currentApplicationAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId() {
      return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder() {
      return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_;
    }

    public static final int PROGRESS_FIELD_NUMBER = 17;
    private float progress_ = 0F;
    /**
     * <code>optional float progress = 17;</code>
     * @return Whether the progress field is set.
     */
    @java.lang.Override
    public boolean hasProgress() {
      return ((bitField0_ & 0x00010000) != 0);
    }
    /**
     * <code>optional float progress = 17;</code>
     * @return The progress.
     */
    @java.lang.Override
    public float getProgress() {
      return progress_;
    }

    public static final int APPLICATIONTYPE_FIELD_NUMBER = 18;
    @SuppressWarnings("serial")
    private volatile java.lang.Object applicationType_ = "";
    /**
     * <code>optional string applicationType = 18;</code>
     * @return Whether the applicationType field is set.
     */
    @java.lang.Override
    public boolean hasApplicationType() {
      return ((bitField0_ & 0x00020000) != 0);
    }
    /**
     * <code>optional string applicationType = 18;</code>
     * @return The applicationType.
     */
    @java.lang.Override
    public java.lang.String getApplicationType() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationType_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string applicationType = 18;</code>
     * @return The bytes for applicationType.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int AM_RM_TOKEN_FIELD_NUMBER = 19;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_;
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     * @return Whether the amRmToken field is set.
     */
    @java.lang.Override
    public boolean hasAmRmToken() {
      return ((bitField0_ & 0x00040000) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     * @return The amRmToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() {
      return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() {
      return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
    }

    public static final int APPLICATIONTAGS_FIELD_NUMBER = 20;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @return A list containing the applicationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTagsList() {
      return applicationTags_;
    }
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @return The count of applicationTags.
     */
    public int getApplicationTagsCount() {
      return applicationTags_.size();
    }
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    public java.lang.String getApplicationTags(int index) {
      return applicationTags_.get(index);
    }
    /**
     * <code>repeated string applicationTags = 20;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index) {
      return applicationTags_.getByteString(index);
    }
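    /*
     * Repeated strings sit in a LazyStringArrayList: elements parsed off
     * the wire stay as ByteString until first read as a String, and
     * getApplicationTagsBytes(int) returns the ByteString form directly,
     * skipping the UTF-8 decode.
     */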

    public static final int LOG_AGGREGATION_STATUS_FIELD_NUMBER = 21;
    private int logAggregationStatus_ = 1;
    /**
     * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
     * @return Whether the logAggregationStatus field is set.
     */
    @java.lang.Override public boolean hasLogAggregationStatus() {
      return ((bitField0_ & 0x00080000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
     * @return The logAggregationStatus.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() {
      org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED : result;
    }

    public static final int UNMANAGED_APPLICATION_FIELD_NUMBER = 22;
    private boolean unmanagedApplication_ = false;
    /**
     * <code>optional bool unmanaged_application = 22 [default = false];</code>
     * @return Whether the unmanagedApplication field is set.
     */
    @java.lang.Override
    public boolean hasUnmanagedApplication() {
      return ((bitField0_ & 0x00100000) != 0);
    }
    /**
     * <code>optional bool unmanaged_application = 22 [default = false];</code>
     * @return The unmanagedApplication.
     */
    @java.lang.Override
    public boolean getUnmanagedApplication() {
      return unmanagedApplication_;
    }

    public static final int PRIORITY_FIELD_NUMBER = 23;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00200000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int APPNODELABELEXPRESSION_FIELD_NUMBER = 24;
    @SuppressWarnings("serial")
    private volatile java.lang.Object appNodeLabelExpression_ = "";
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return Whether the appNodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasAppNodeLabelExpression() {
      return ((bitField0_ & 0x00400000) != 0);
    }
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return The appNodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getAppNodeLabelExpression() {
      java.lang.Object ref = appNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          appNodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string appNodeLabelExpression = 24;</code>
     * @return The bytes for appNodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAppNodeLabelExpressionBytes() {
      java.lang.Object ref = appNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        appNodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int AMNODELABELEXPRESSION_FIELD_NUMBER = 25;
    @SuppressWarnings("serial")
    private volatile java.lang.Object amNodeLabelExpression_ = "";
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return Whether the amNodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasAmNodeLabelExpression() {
      return ((bitField0_ & 0x00800000) != 0);
    }
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The amNodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getAmNodeLabelExpression() {
      java.lang.Object ref = amNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          amNodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string amNodeLabelExpression = 25;</code>
     * @return The bytes for amNodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAmNodeLabelExpressionBytes() {
      java.lang.Object ref = amNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        amNodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int APPTIMEOUTS_FIELD_NUMBER = 26;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> appTimeouts_;
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> getAppTimeoutsList() {
      return appTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> 
        getAppTimeoutsOrBuilderList() {
      return appTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    @java.lang.Override
    public int getAppTimeoutsCount() {
      return appTimeouts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index) {
      return appTimeouts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder(
        int index) {
      return appTimeouts_.get(index);
    }

    public static final int LAUNCHTIME_FIELD_NUMBER = 27;
    private long launchTime_ = 0L;
    /**
     * <code>optional int64 launchTime = 27;</code>
     * @return Whether the launchTime field is set.
     */
    @java.lang.Override
    public boolean hasLaunchTime() {
      return ((bitField0_ & 0x01000000) != 0);
    }
    /**
     * <code>optional int64 launchTime = 27;</code>
     * @return The launchTime.
     */
    @java.lang.Override
    public long getLaunchTime() {
      return launchTime_;
    }

    public static final int SUBMITTIME_FIELD_NUMBER = 28;
    private long submitTime_ = 0L;
    /**
     * <code>optional int64 submitTime = 28;</code>
     * @return Whether the submitTime field is set.
     */
    @java.lang.Override
    public boolean hasSubmitTime() {
      return ((bitField0_ & 0x02000000) != 0);
    }
    /**
     * <code>optional int64 submitTime = 28;</code>
     * @return The submitTime.
     */
    @java.lang.Override
    public long getSubmitTime() {
      return submitTime_;
    }

    public static final int RMCLUSTERID_FIELD_NUMBER = 29;
    @SuppressWarnings("serial")
    private volatile java.lang.Object rmClusterId_ = "";
    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return Whether the rmClusterId field is set.
     */
    @java.lang.Override
    public boolean hasRmClusterId() {
      return ((bitField0_ & 0x04000000) != 0);
    }
    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return The rmClusterId.
     */
    @java.lang.Override
    public java.lang.String getRmClusterId() {
      java.lang.Object ref = rmClusterId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          rmClusterId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string rmClusterId = 29;</code>
     * @return The bytes for rmClusterId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRmClusterIdBytes() {
      java.lang.Object ref = rmClusterId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        rmClusterId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasClientToAmToken()) {
        if (!getClientToAmToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasAppResourceUsage()) {
        if (!getAppResourceUsage().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasAmRmToken()) {
        if (!getAmRmToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAppTimeoutsCount(); i++) {
        if (!getAppTimeouts(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
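    /*
     * memoizedIsInitialized caches the verification result (-1 unknown,
     * 0 failed, 1 passed).  Only message-typed fields whose types can
     * transitively contain required fields are walked; optional scalars and
     * strings can never leave the message uninitialized.
     */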

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, queue_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, name_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, host_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt32(6, rpcPort_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(7, getClientToAmToken());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeEnum(8, yarnApplicationState_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, trackingUrl_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, diagnostics_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeInt64(11, startTime_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeInt64(12, finishTime_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        output.writeEnum(13, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        output.writeMessage(14, getAppResourceUsage());
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 15, originalTrackingUrl_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        output.writeMessage(16, getCurrentApplicationAttemptId());
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        output.writeFloat(17, progress_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 18, applicationType_);
      }
      if (((bitField0_ & 0x00040000) != 0)) {
        output.writeMessage(19, getAmRmToken());
      }
      for (int i = 0; i < applicationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 20, applicationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00080000) != 0)) {
        output.writeEnum(21, logAggregationStatus_);
      }
      if (((bitField0_ & 0x00100000) != 0)) {
        output.writeBool(22, unmanagedApplication_);
      }
      if (((bitField0_ & 0x00200000) != 0)) {
        output.writeMessage(23, getPriority());
      }
      if (((bitField0_ & 0x00400000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 24, appNodeLabelExpression_);
      }
      if (((bitField0_ & 0x00800000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 25, amNodeLabelExpression_);
      }
      for (int i = 0; i < appTimeouts_.size(); i++) {
        output.writeMessage(26, appTimeouts_.get(i));
      }
      if (((bitField0_ & 0x01000000) != 0)) {
        output.writeInt64(27, launchTime_);
      }
      if (((bitField0_ & 0x02000000) != 0)) {
        output.writeInt64(28, submitTime_);
      }
      if (((bitField0_ & 0x04000000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 29, rmClusterId_);
      }
      getUnknownFields().writeTo(output);
    }
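    /*
     * writeTo emits fields in ascending field-number order, which is why the
     * repeated applicationTags (20) land between am_rm_token (19) and
     * log_aggregation_status (21).  A round-trip sketch using the standard
     * surface inherited from AbstractMessageLite (the "report" variable is
     * illustrative):
     *
     *   byte[] bytes = report.toByteArray();  // getSerializedSize() + writeTo()
     *   ApplicationReportProto copy = ApplicationReportProto.parseFrom(bytes);
     */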

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, queue_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, name_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, host_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(6, rpcPort_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getClientToAmToken());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(8, yarnApplicationState_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, trackingUrl_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(10, diagnostics_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(11, startTime_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(12, finishTime_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(13, finalApplicationStatus_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(14, getAppResourceUsage());
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(15, originalTrackingUrl_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(16, getCurrentApplicationAttemptId());
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(17, progress_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(18, applicationType_);
      }
      if (((bitField0_ & 0x00040000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(19, getAmRmToken());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < applicationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i));
        }
        size += dataSize;
        size += 2 * getApplicationTagsList().size();
      }
      if (((bitField0_ & 0x00080000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(21, logAggregationStatus_);
      }
      if (((bitField0_ & 0x00100000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(22, unmanagedApplication_);
      }
      if (((bitField0_ & 0x00200000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(23, getPriority());
      }
      if (((bitField0_ & 0x00400000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(24, appNodeLabelExpression_);
      }
      if (((bitField0_ & 0x00800000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(25, amNodeLabelExpression_);
      }
      for (int i = 0; i < appTimeouts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(26, appTimeouts_.get(i));
      }
      if (((bitField0_ & 0x01000000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(27, launchTime_);
      }
      if (((bitField0_ & 0x02000000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(28, submitTime_);
      }
      if (((bitField0_ & 0x04000000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(29, rmClusterId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
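
    // getSerializedSize() is memoized: the sum is computed on the first call
    // and cached in memoizedSize, which is safe because message instances are
    // immutable once built. toByteArray() and writeTo() rely on this value to
    // size their output buffers.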

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto) obj;

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasRpcPort() != other.hasRpcPort()) return false;
      if (hasRpcPort()) {
        if (getRpcPort()
            != other.getRpcPort()) return false;
      }
      if (hasClientToAmToken() != other.hasClientToAmToken()) return false;
      if (hasClientToAmToken()) {
        if (!getClientToAmToken()
            .equals(other.getClientToAmToken())) return false;
      }
      if (hasYarnApplicationState() != other.hasYarnApplicationState()) return false;
      if (hasYarnApplicationState()) {
        if (yarnApplicationState_ != other.yarnApplicationState_) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (hasFinalApplicationStatus() != other.hasFinalApplicationStatus()) return false;
      if (hasFinalApplicationStatus()) {
        if (finalApplicationStatus_ != other.finalApplicationStatus_) return false;
      }
      if (hasAppResourceUsage() != other.hasAppResourceUsage()) return false;
      if (hasAppResourceUsage()) {
        if (!getAppResourceUsage()
            .equals(other.getAppResourceUsage())) return false;
      }
      if (hasOriginalTrackingUrl() != other.hasOriginalTrackingUrl()) return false;
      if (hasOriginalTrackingUrl()) {
        if (!getOriginalTrackingUrl()
            .equals(other.getOriginalTrackingUrl())) return false;
      }
      if (hasCurrentApplicationAttemptId() != other.hasCurrentApplicationAttemptId()) return false;
      if (hasCurrentApplicationAttemptId()) {
        if (!getCurrentApplicationAttemptId()
            .equals(other.getCurrentApplicationAttemptId())) return false;
      }
      if (hasProgress() != other.hasProgress()) return false;
      if (hasProgress()) {
        if (java.lang.Float.floatToIntBits(getProgress())
            != java.lang.Float.floatToIntBits(
                other.getProgress())) return false;
      }
      if (hasApplicationType() != other.hasApplicationType()) return false;
      if (hasApplicationType()) {
        if (!getApplicationType()
            .equals(other.getApplicationType())) return false;
      }
      if (hasAmRmToken() != other.hasAmRmToken()) return false;
      if (hasAmRmToken()) {
        if (!getAmRmToken()
            .equals(other.getAmRmToken())) return false;
      }
      if (!getApplicationTagsList()
          .equals(other.getApplicationTagsList())) return false;
      if (hasLogAggregationStatus() != other.hasLogAggregationStatus()) return false;
      if (hasLogAggregationStatus()) {
        if (logAggregationStatus_ != other.logAggregationStatus_) return false;
      }
      if (hasUnmanagedApplication() != other.hasUnmanagedApplication()) return false;
      if (hasUnmanagedApplication()) {
        if (getUnmanagedApplication()
            != other.getUnmanagedApplication()) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasAppNodeLabelExpression() != other.hasAppNodeLabelExpression()) return false;
      if (hasAppNodeLabelExpression()) {
        if (!getAppNodeLabelExpression()
            .equals(other.getAppNodeLabelExpression())) return false;
      }
      if (hasAmNodeLabelExpression() != other.hasAmNodeLabelExpression()) return false;
      if (hasAmNodeLabelExpression()) {
        if (!getAmNodeLabelExpression()
            .equals(other.getAmNodeLabelExpression())) return false;
      }
      if (!getAppTimeoutsList()
          .equals(other.getAppTimeoutsList())) return false;
      if (hasLaunchTime() != other.hasLaunchTime()) return false;
      if (hasLaunchTime()) {
        if (getLaunchTime()
            != other.getLaunchTime()) return false;
      }
      if (hasSubmitTime() != other.hasSubmitTime()) return false;
      if (hasSubmitTime()) {
        if (getSubmitTime()
            != other.getSubmitTime()) return false;
      }
      if (hasRmClusterId() != other.hasRmClusterId()) return false;
      if (hasRmClusterId()) {
        if (!getRmClusterId()
            .equals(other.getRmClusterId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
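
    // equals() is presence-aware: for every optional field both sides must
    // agree on has<Field>() before values are compared, so an unset field is
    // never equal to an explicitly set default. A minimal sketch:
    //
    //   ApplicationReportProto a = ApplicationReportProto.newBuilder().build();
    //   ApplicationReportProto b = ApplicationReportProto.newBuilder()
    //       .setRpcPort(0)   // the default value, but explicitly set
    //       .build();
    //   // a.equals(b) is false because hasRpcPort() differs.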

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATIONID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasRpcPort()) {
        hash = (37 * hash) + RPC_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getRpcPort();
      }
      if (hasClientToAmToken()) {
        hash = (37 * hash) + CLIENT_TO_AM_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getClientToAmToken().hashCode();
      }
      if (hasYarnApplicationState()) {
        hash = (37 * hash) + YARN_APPLICATION_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationState_;
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKINGURL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + STARTTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISHTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      if (hasFinalApplicationStatus()) {
        hash = (37 * hash) + FINAL_APPLICATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + finalApplicationStatus_;
      }
      if (hasAppResourceUsage()) {
        hash = (37 * hash) + APP_RESOURCE_USAGE_FIELD_NUMBER;
        hash = (53 * hash) + getAppResourceUsage().hashCode();
      }
      if (hasOriginalTrackingUrl()) {
        hash = (37 * hash) + ORIGINALTRACKINGURL_FIELD_NUMBER;
        hash = (53 * hash) + getOriginalTrackingUrl().hashCode();
      }
      if (hasCurrentApplicationAttemptId()) {
        hash = (37 * hash) + CURRENTAPPLICATIONATTEMPTID_FIELD_NUMBER;
        hash = (53 * hash) + getCurrentApplicationAttemptId().hashCode();
      }
      if (hasProgress()) {
        hash = (37 * hash) + PROGRESS_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getProgress());
      }
      if (hasApplicationType()) {
        hash = (37 * hash) + APPLICATIONTYPE_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationType().hashCode();
      }
      if (hasAmRmToken()) {
        hash = (37 * hash) + AM_RM_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getAmRmToken().hashCode();
      }
      if (getApplicationTagsCount() > 0) {
        hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTagsList().hashCode();
      }
      if (hasLogAggregationStatus()) {
        hash = (37 * hash) + LOG_AGGREGATION_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + logAggregationStatus_;
      }
      if (hasUnmanagedApplication()) {
        hash = (37 * hash) + UNMANAGED_APPLICATION_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getUnmanagedApplication());
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasAppNodeLabelExpression()) {
        hash = (37 * hash) + APPNODELABELEXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getAppNodeLabelExpression().hashCode();
      }
      if (hasAmNodeLabelExpression()) {
        hash = (37 * hash) + AMNODELABELEXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getAmNodeLabelExpression().hashCode();
      }
      if (getAppTimeoutsCount() > 0) {
        hash = (37 * hash) + APPTIMEOUTS_FIELD_NUMBER;
        hash = (53 * hash) + getAppTimeoutsList().hashCode();
      }
      if (hasLaunchTime()) {
        hash = (37 * hash) + LAUNCHTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getLaunchTime());
      }
      if (hasSubmitTime()) {
        hash = (37 * hash) + SUBMITTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSubmitTime());
      }
      if (hasRmClusterId()) {
        hash = (37 * hash) + RMCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getRmClusterId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
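
    // hashCode() mirrors equals(): only fields whose has<Field>() is true
    // contribute, each mixed in together with its field number so that the
    // same value on different fields hashes differently. The result is
    // memoized with 0 as the "not yet computed" sentinel; in the unlikely
    // case the true hash is 0 it is simply recomputed on every call.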

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
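
    // A minimal round-trip sketch (names are illustrative):
    //
    //   ApplicationReportProto report = ApplicationReportProto.newBuilder()
    //       .setUser("alice")
    //       .setQueue("default")
    //       .build();
    //   ApplicationReportProto parsed =
    //       ApplicationReportProto.parseFrom(report.toByteArray());
    //   // parsed.equals(report) is true.
    //
    // The InputStream overloads consume the stream to EOF as a single
    // unframed message; parseDelimitedFrom instead expects a varint length
    // prefix (as written by writeDelimitedTo), so several messages can be
    // read back from one stream.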

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
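
    // newBuilder() starts from the default instance; toBuilder() pre-populates
    // a builder with this message's fields, e.g. (sketch):
    //
    //   ApplicationReportProto updated = report.toBuilder()
    //       .setProgress(1.0F)
    //       .build();
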
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationReportProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationReportProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
          getClientToAmTokenFieldBuilder();
          getAppResourceUsageFieldBuilder();
          getCurrentApplicationAttemptIdFieldBuilder();
          getAmRmTokenFieldBuilder();
          getPriorityFieldBuilder();
          getAppTimeoutsFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        user_ = "";
        queue_ = "";
        name_ = "";
        host_ = "";
        rpcPort_ = 0;
        clientToAmToken_ = null;
        if (clientToAmTokenBuilder_ != null) {
          clientToAmTokenBuilder_.dispose();
          clientToAmTokenBuilder_ = null;
        }
        yarnApplicationState_ = 1;
        trackingUrl_ = "";
        diagnostics_ = "N/A";
        startTime_ = 0L;
        finishTime_ = 0L;
        finalApplicationStatus_ = 0;
        appResourceUsage_ = null;
        if (appResourceUsageBuilder_ != null) {
          appResourceUsageBuilder_.dispose();
          appResourceUsageBuilder_ = null;
        }
        originalTrackingUrl_ = "";
        currentApplicationAttemptId_ = null;
        if (currentApplicationAttemptIdBuilder_ != null) {
          currentApplicationAttemptIdBuilder_.dispose();
          currentApplicationAttemptIdBuilder_ = null;
        }
        progress_ = 0F;
        applicationType_ = "";
        amRmToken_ = null;
        if (amRmTokenBuilder_ != null) {
          amRmTokenBuilder_.dispose();
          amRmTokenBuilder_ = null;
        }
        applicationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        logAggregationStatus_ = 1;
        unmanagedApplication_ = false;
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        appNodeLabelExpression_ = "";
        amNodeLabelExpression_ = "";
        if (appTimeoutsBuilder_ == null) {
          appTimeouts_ = java.util.Collections.emptyList();
        } else {
          appTimeouts_ = null;
          appTimeoutsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x02000000);
        launchTime_ = 0L;
        submitTime_ = 0L;
        rmClusterId_ = "";
        return this;
      }
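
      // clear() restores every field to its proto default, including the
      // schema-declared ones visible above (yarnApplicationState_ falls back
      // to enum number 1, diagnostics_ to "N/A"), so one builder instance can
      // be reused across messages instead of allocating a new one each time.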

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result) {
        if (appTimeoutsBuilder_ == null) {
          if (((bitField0_ & 0x02000000) != 0)) {
            appTimeouts_ = java.util.Collections.unmodifiableList(appTimeouts_);
            bitField0_ = (bitField0_ & ~0x02000000);
          }
          result.appTimeouts_ = appTimeouts_;
        } else {
          result.appTimeouts_ = appTimeoutsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.user_ = user_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.host_ = host_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.rpcPort_ = rpcPort_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.clientToAmToken_ = clientToAmTokenBuilder_ == null
              ? clientToAmToken_
              : clientToAmTokenBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.yarnApplicationState_ = yarnApplicationState_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.trackingUrl_ = trackingUrl_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.finalApplicationStatus_ = finalApplicationStatus_;
          to_bitField0_ |= 0x00001000;
        }
        if (((from_bitField0_ & 0x00002000) != 0)) {
          result.appResourceUsage_ = appResourceUsageBuilder_ == null
              ? appResourceUsage_
              : appResourceUsageBuilder_.build();
          to_bitField0_ |= 0x00002000;
        }
        if (((from_bitField0_ & 0x00004000) != 0)) {
          result.originalTrackingUrl_ = originalTrackingUrl_;
          to_bitField0_ |= 0x00004000;
        }
        if (((from_bitField0_ & 0x00008000) != 0)) {
          result.currentApplicationAttemptId_ = currentApplicationAttemptIdBuilder_ == null
              ? currentApplicationAttemptId_
              : currentApplicationAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00008000;
        }
        if (((from_bitField0_ & 0x00010000) != 0)) {
          result.progress_ = progress_;
          to_bitField0_ |= 0x00010000;
        }
        if (((from_bitField0_ & 0x00020000) != 0)) {
          result.applicationType_ = applicationType_;
          to_bitField0_ |= 0x00020000;
        }
        if (((from_bitField0_ & 0x00040000) != 0)) {
          result.amRmToken_ = amRmTokenBuilder_ == null
              ? amRmToken_
              : amRmTokenBuilder_.build();
          to_bitField0_ |= 0x00040000;
        }
        if (((from_bitField0_ & 0x00080000) != 0)) {
          applicationTags_.makeImmutable();
          result.applicationTags_ = applicationTags_;
        }
        if (((from_bitField0_ & 0x00100000) != 0)) {
          result.logAggregationStatus_ = logAggregationStatus_;
          to_bitField0_ |= 0x00080000;
        }
        if (((from_bitField0_ & 0x00200000) != 0)) {
          result.unmanagedApplication_ = unmanagedApplication_;
          to_bitField0_ |= 0x00100000;
        }
        if (((from_bitField0_ & 0x00400000) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00200000;
        }
        if (((from_bitField0_ & 0x00800000) != 0)) {
          result.appNodeLabelExpression_ = appNodeLabelExpression_;
          to_bitField0_ |= 0x00400000;
        }
        if (((from_bitField0_ & 0x01000000) != 0)) {
          result.amNodeLabelExpression_ = amNodeLabelExpression_;
          to_bitField0_ |= 0x00800000;
        }
        if (((from_bitField0_ & 0x04000000) != 0)) {
          result.launchTime_ = launchTime_;
          to_bitField0_ |= 0x01000000;
        }
        if (((from_bitField0_ & 0x08000000) != 0)) {
          result.submitTime_ = submitTime_;
          to_bitField0_ |= 0x02000000;
        }
        if (((from_bitField0_ & 0x10000000) != 0)) {
          result.rmClusterId_ = rmClusterId_;
          to_bitField0_ |= 0x04000000;
        }
        result.bitField0_ |= to_bitField0_;
      }
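
      // buildPartial0 copies each set field and remaps the builder's presence
      // bits onto the message's. The repeated fields (applicationTags,
      // appTimeouts) consume builder bits but have no message bit, so from
      // logAggregationStatus onward the masks diverge by one position
      // (0x00100000 -> 0x00080000) and from launchTime onward by two
      // (0x04000000 -> 0x01000000).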

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance()) return this;
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasUser()) {
          user_ = other.user_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasHost()) {
          host_ = other.host_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (other.hasRpcPort()) {
          setRpcPort(other.getRpcPort());
        }
        if (other.hasClientToAmToken()) {
          mergeClientToAmToken(other.getClientToAmToken());
        }
        if (other.hasYarnApplicationState()) {
          setYarnApplicationState(other.getYarnApplicationState());
        }
        if (other.hasTrackingUrl()) {
          trackingUrl_ = other.trackingUrl_;
          bitField0_ |= 0x00000100;
          onChanged();
        }
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000200;
          onChanged();
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        if (other.hasFinalApplicationStatus()) {
          setFinalApplicationStatus(other.getFinalApplicationStatus());
        }
        if (other.hasAppResourceUsage()) {
          mergeAppResourceUsage(other.getAppResourceUsage());
        }
        if (other.hasOriginalTrackingUrl()) {
          originalTrackingUrl_ = other.originalTrackingUrl_;
          bitField0_ |= 0x00004000;
          onChanged();
        }
        if (other.hasCurrentApplicationAttemptId()) {
          mergeCurrentApplicationAttemptId(other.getCurrentApplicationAttemptId());
        }
        if (other.hasProgress()) {
          setProgress(other.getProgress());
        }
        if (other.hasApplicationType()) {
          applicationType_ = other.applicationType_;
          bitField0_ |= 0x00020000;
          onChanged();
        }
        if (other.hasAmRmToken()) {
          mergeAmRmToken(other.getAmRmToken());
        }
        if (!other.applicationTags_.isEmpty()) {
          if (applicationTags_.isEmpty()) {
            applicationTags_ = other.applicationTags_;
            bitField0_ |= 0x00080000;
          } else {
            ensureApplicationTagsIsMutable();
            applicationTags_.addAll(other.applicationTags_);
          }
          onChanged();
        }
        if (other.hasLogAggregationStatus()) {
          setLogAggregationStatus(other.getLogAggregationStatus());
        }
        if (other.hasUnmanagedApplication()) {
          setUnmanagedApplication(other.getUnmanagedApplication());
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasAppNodeLabelExpression()) {
          appNodeLabelExpression_ = other.appNodeLabelExpression_;
          bitField0_ |= 0x00800000;
          onChanged();
        }
        if (other.hasAmNodeLabelExpression()) {
          amNodeLabelExpression_ = other.amNodeLabelExpression_;
          bitField0_ |= 0x01000000;
          onChanged();
        }
        if (appTimeoutsBuilder_ == null) {
          if (!other.appTimeouts_.isEmpty()) {
            if (appTimeouts_.isEmpty()) {
              appTimeouts_ = other.appTimeouts_;
              bitField0_ = (bitField0_ & ~0x02000000);
            } else {
              ensureAppTimeoutsIsMutable();
              appTimeouts_.addAll(other.appTimeouts_);
            }
            onChanged();
          }
        } else {
          if (!other.appTimeouts_.isEmpty()) {
            if (appTimeoutsBuilder_.isEmpty()) {
              appTimeoutsBuilder_.dispose();
              appTimeoutsBuilder_ = null;
              appTimeouts_ = other.appTimeouts_;
              bitField0_ = (bitField0_ & ~0x02000000);
              appTimeoutsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAppTimeoutsFieldBuilder() : null;
            } else {
              appTimeoutsBuilder_.addAllMessages(other.appTimeouts_);
            }
          }
        }
        if (other.hasLaunchTime()) {
          setLaunchTime(other.getLaunchTime());
        }
        if (other.hasSubmitTime()) {
          setSubmitTime(other.getSubmitTime());
        }
        if (other.hasRmClusterId()) {
          rmClusterId_ = other.rmClusterId_;
          bitField0_ |= 0x10000000;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
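
      // Standard proto2 merge semantics: scalar and string fields set in
      // `other` overwrite this builder's values, sub-messages are merged
      // recursively (mergeApplicationId and friends), and the repeated fields
      // are concatenated. Sketch:
      //
      //   Builder b = reportA.toBuilder();
      //   b.mergeFrom(reportB);  // reportB's set fields win, field by field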

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasClientToAmToken()) {
          if (!getClientToAmToken().isInitialized()) {
            return false;
          }
        }
        if (hasAppResourceUsage()) {
          if (!getAppResourceUsage().isInitialized()) {
            return false;
          }
        }
        if (hasAmRmToken()) {
          if (!getAmRmToken().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getAppTimeoutsCount(); i++) {
          if (!getAppTimeouts(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }
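
      // ApplicationReportProto itself declares no required fields, so the
      // builder can only be uninitialized through sub-messages whose types
      // (transitively) contain required fields: the two tokens, the resource
      // usage report, and each appTimeouts entry.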

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                user_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                host_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                rpcPort_ = input.readInt32();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 58: {
                input.readMessage(
                    getClientToAmTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 64: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(8, tmpRaw);
                } else {
                  yarnApplicationState_ = tmpRaw;
                  bitField0_ |= 0x00000080;
                }
                break;
              } // case 64
              case 74: {
                trackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000100;
                break;
              } // case 74
              case 82: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000200;
                break;
              } // case 82
              case 88: {
                startTime_ = input.readInt64();
                bitField0_ |= 0x00000400;
                break;
              } // case 88
              case 96: {
                finishTime_ = input.readInt64();
                bitField0_ |= 0x00000800;
                break;
              } // case 96
              case 104: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(13, tmpRaw);
                } else {
                  finalApplicationStatus_ = tmpRaw;
                  bitField0_ |= 0x00001000;
                }
                break;
              } // case 104
              case 114: {
                input.readMessage(
                    getAppResourceUsageFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00002000;
                break;
              } // case 114
              case 122: {
                originalTrackingUrl_ = input.readBytes();
                bitField0_ |= 0x00004000;
                break;
              } // case 122
              case 130: {
                input.readMessage(
                    getCurrentApplicationAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00008000;
                break;
              } // case 130
              case 141: {
                progress_ = input.readFloat();
                bitField0_ |= 0x00010000;
                break;
              } // case 141
              case 146: {
                applicationType_ = input.readBytes();
                bitField0_ |= 0x00020000;
                break;
              } // case 146
              case 154: {
                input.readMessage(
                    getAmRmTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00040000;
                break;
              } // case 154
              case 162: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureApplicationTagsIsMutable();
                applicationTags_.add(bs);
                break;
              } // case 162
              case 168: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(21, tmpRaw);
                } else {
                  logAggregationStatus_ = tmpRaw;
                  bitField0_ |= 0x00100000;
                }
                break;
              } // case 168
              case 176: {
                unmanagedApplication_ = input.readBool();
                bitField0_ |= 0x00200000;
                break;
              } // case 176
              case 186: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00400000;
                break;
              } // case 186
              case 194: {
                appNodeLabelExpression_ = input.readBytes();
                bitField0_ |= 0x00800000;
                break;
              } // case 194
              case 202: {
                amNodeLabelExpression_ = input.readBytes();
                bitField0_ |= 0x01000000;
                break;
              } // case 202
              case 210: {
                org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.PARSER,
                        extensionRegistry);
                if (appTimeoutsBuilder_ == null) {
                  ensureAppTimeoutsIsMutable();
                  appTimeouts_.add(m);
                } else {
                  appTimeoutsBuilder_.addMessage(m);
                }
                break;
              } // case 210
              case 216: {
                launchTime_ = input.readInt64();
                bitField0_ |= 0x04000000;
                break;
              } // case 216
              case 224: {
                submitTime_ = input.readInt64();
                bitField0_ |= 0x08000000;
                break;
              } // case 224
              case 234: {
                rmClusterId_ = input.readBytes();
                bitField0_ |= 0x10000000;
                break;
              } // case 234
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
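
      // The case labels are precomputed wire tags: tag = (field_number << 3)
      // | wire_type. For example 210 = 26 << 3 | 2 (appTimeouts,
      // length-delimited), 216 = 27 << 3 | 0 (launchTime, varint) and
      // 141 = 17 << 3 | 5 (progress, fixed 32-bit). Enum numbers the runtime
      // does not recognize are kept as unknown varint fields instead of being
      // dropped.
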
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto applicationId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }
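
      // Message-typed fields use a lazy sub-builder: the value lives in
      // applicationId_ until a field builder is first requested (via
      // getApplicationIdBuilder(), or eagerly when alwaysUseFieldBuilders is
      // set), after which the SingleFieldBuilderV3 owns it and the plain
      // reference is nulled. In-place editing sketch, assuming
      // ApplicationIdProto exposes a setId(int) setter:
      //
      //   builder.getApplicationIdBuilder().setId(42);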

      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 2;</code>
       * @return Whether the user field is set.
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The user.
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The bytes for user.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The user to set.
       * @return This builder for chaining.
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearUser() {
        user_ = getDefaultInstance().getUser();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The bytes for user to set.
       * @return This builder for chaining.
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
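
      // String fields are held as java.lang.Object so the backing value can
      // flip between String and ByteString: getUser() decodes UTF-8 and
      // caches the String form (only when valid), while getUserBytes() caches
      // the reverse conversion. setUserBytes() accepts arbitrary bytes without
      // validation, which is why getUser() re-checks isValidUtf8().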

      private java.lang.Object queue_ = "";
      /**
       * <code>optional string queue = 3;</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string queue = 3;</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 3;</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 3;</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 3;</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.lang.Object name_ = "";
      /**
       * <code>optional string name = 4;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string name = 4;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string name = 4;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string name = 4;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 4;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object host_ = "";
      /**
       * <code>optional string host = 5;</code>
       * @return Whether the host field is set.
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string host = 5;</code>
       * @return The host.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string host = 5;</code>
       * @return The bytes for host.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string host = 5;</code>
       * @param value The host to set.
       * @return This builder for chaining.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearHost() {
        host_ = getDefaultInstance().getHost();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 5;</code>
       * @param value The bytes for host to set.
       * @return This builder for chaining.
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private int rpcPort_ ;
      /**
       * <code>optional int32 rpc_port = 6;</code>
       * @return Whether the rpcPort field is set.
       */
      @java.lang.Override
      public boolean hasRpcPort() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int32 rpc_port = 6;</code>
       * @return The rpcPort.
       */
      @java.lang.Override
      public int getRpcPort() {
        return rpcPort_;
      }
      /**
       * <code>optional int32 rpc_port = 6;</code>
       * @param value The rpcPort to set.
       * @return This builder for chaining.
       */
      public Builder setRpcPort(int value) {
        rpcPort_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 rpc_port = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearRpcPort() {
        bitField0_ = (bitField0_ & ~0x00000020);
        rpcPort_ = 0;
        onChanged();
        return this;
      }
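
      // Editor's note (illustrative comment, not protoc output): primitive
      // fields such as rpc_port need no null check; presence is tracked only
      // through bitField0_, so hasRpcPort() distinguishes "explicitly set to
      // 0" from "never set". A hedged sketch with `b` as a Builder instance:
      //
      //   b.setRpcPort(0);        // hasRpcPort() == true, value 0
      //   b.clearRpcPort();       // hasRpcPort() == false, value back to 0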

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto clientToAmToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> clientToAmTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       * @return Whether the clientToAmToken field is set.
       */
      public boolean hasClientToAmToken() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       * @return The clientToAmToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getClientToAmToken() {
        if (clientToAmTokenBuilder_ == null) {
          return clientToAmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_;
        } else {
          return clientToAmTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public Builder setClientToAmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (clientToAmTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          clientToAmToken_ = value;
        } else {
          clientToAmTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public Builder setClientToAmToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (clientToAmTokenBuilder_ == null) {
          clientToAmToken_ = builderForValue.build();
        } else {
          clientToAmTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public Builder mergeClientToAmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (clientToAmTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            clientToAmToken_ != null &&
            clientToAmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getClientToAmTokenBuilder().mergeFrom(value);
          } else {
            clientToAmToken_ = value;
          }
        } else {
          clientToAmTokenBuilder_.mergeFrom(value);
        }
        if (clientToAmToken_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public Builder clearClientToAmToken() {
        bitField0_ = (bitField0_ & ~0x00000040);
        clientToAmToken_ = null;
        if (clientToAmTokenBuilder_ != null) {
          clientToAmTokenBuilder_.dispose();
          clientToAmTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getClientToAmTokenBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getClientToAmTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getClientToAmTokenOrBuilder() {
        if (clientToAmTokenBuilder_ != null) {
          return clientToAmTokenBuilder_.getMessageOrBuilder();
        } else {
          return clientToAmToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : clientToAmToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto client_to_am_token = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getClientToAmTokenFieldBuilder() {
        if (clientToAmTokenBuilder_ == null) {
          clientToAmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getClientToAmToken(),
                  getParentForChildren(),
                  isClean());
          clientToAmToken_ = null;
        }
        return clientToAmTokenBuilder_;
      }
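
      // Editor's note (illustrative comment, not protoc output): message
      // fields keep a dual representation. Until a builder is requested via
      // getClientToAmTokenBuilder(), the value lives in the plain
      // clientToAmToken_ field; the first such call creates the
      // SingleFieldBuilderV3, hands it the current value, and nulls the
      // field, so every public accessor branches on which representation is
      // live. mergeClientToAmToken() only does a field-wise proto merge when
      // the field is already set to a non-default message; otherwise it is a
      // plain replacement. A hedged sketch with `b` as a Builder instance:
      //
      //   b.setClientToAmToken(tokenA).mergeClientToAmToken(tokenB); // merge
      //   b.clearClientToAmToken().mergeClientToAmToken(tokenB);     // replace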

      private int yarnApplicationState_ = 1;
      /**
       * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
       * @return Whether the yarnApplicationState field is set.
       */
      @java.lang.Override public boolean hasYarnApplicationState() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
       * @return The yarnApplicationState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto getYarnApplicationState() {
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.forNumber(yarnApplicationState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto.NEW : result;
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
       * @param value The yarnApplicationState to set.
       * @return This builder for chaining.
       */
      public Builder setYarnApplicationState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        yarnApplicationState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationStateProto yarn_application_state = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearYarnApplicationState() {
        bitField0_ = (bitField0_ & ~0x00000080);
        yarnApplicationState_ = 1;
        onChanged();
        return this;
      }
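
      // Editor's note (illustrative comment, not protoc output): enum fields
      // are stored as a raw int so that wire values unknown to this generated
      // code are preserved. getYarnApplicationState() maps the int through
      // forNumber() and falls back to NEW, matching the initial/cleared value
      // of 1 used above. A hedged sketch with `b` as a Builder instance:
      //
      //   b.setYarnApplicationState(YarnApplicationStateProto.RUNNING);
      //   b.clearYarnApplicationState();   // back to 1, i.e. NEW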

      private java.lang.Object trackingUrl_ = "";
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @return Whether the trackingUrl field is set.
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @return The trackingUrl.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @return The bytes for trackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @param value The trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrackingUrl() {
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000100);
        onChanged();
        return this;
      }
      /**
       * <code>optional string trackingUrl = 9;</code>
       * @param value The bytes for trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }

      private java.lang.Object diagnostics_ = "N/A";
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000200);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 10 [default = "N/A"];</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
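
      // Editor's note (illustrative comment, not protoc output): diagnostics
      // declares a proto-level default of "N/A", so clearDiagnostics()
      // restores "N/A" via getDefaultInstance().getDiagnostics() instead of
      // the empty string used by the other string fields in this Builder:
      //
      //   b.clearDiagnostics();
      //   b.getDiagnostics();              // "N/A", hasDiagnostics() == false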

      private long startTime_ ;
      /**
       * <code>optional int64 startTime = 11;</code>
       * @return Whether the startTime field is set.
       */
      @java.lang.Override
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional int64 startTime = 11;</code>
       * @return The startTime.
       */
      @java.lang.Override
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 startTime = 11;</code>
       * @param value The startTime to set.
       * @return This builder for chaining.
       */
      public Builder setStartTime(long value) {
        startTime_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 startTime = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000400);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long finishTime_ ;
      /**
       * <code>optional int64 finishTime = 12;</code>
       * @return Whether the finishTime field is set.
       */
      @java.lang.Override
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional int64 finishTime = 12;</code>
       * @return The finishTime.
       */
      @java.lang.Override
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * <code>optional int64 finishTime = 12;</code>
       * @param value The finishTime to set.
       * @return This builder for chaining.
       */
      public Builder setFinishTime(long value) {
        finishTime_ = value;
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 finishTime = 12;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000800);
        finishTime_ = 0L;
        onChanged();
        return this;
      }

      private int finalApplicationStatus_ = 0;
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
       * @return Whether the finalApplicationStatus field is set.
       */
      @java.lang.Override public boolean hasFinalApplicationStatus() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
       * @return The finalApplicationStatus.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto getFinalApplicationStatus() {
        org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.forNumber(finalApplicationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto.APP_UNDEFINED : result;
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
       * @param value The finalApplicationStatus to set.
       * @return This builder for chaining.
       */
      public Builder setFinalApplicationStatus(org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00001000;
        finalApplicationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FinalApplicationStatusProto final_application_status = 13;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinalApplicationStatus() {
        bitField0_ = (bitField0_ & ~0x00001000);
        finalApplicationStatus_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto appResourceUsage_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder> appResourceUsageBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       * @return Whether the appResourceUsage field is set.
       */
      public boolean hasAppResourceUsage() {
        return ((bitField0_ & 0x00002000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       * @return The appResourceUsage.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto getAppResourceUsage() {
        if (appResourceUsageBuilder_ == null) {
          return appResourceUsage_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_;
        } else {
          return appResourceUsageBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public Builder setAppResourceUsage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto value) {
        if (appResourceUsageBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          appResourceUsage_ = value;
        } else {
          appResourceUsageBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public Builder setAppResourceUsage(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder builderForValue) {
        if (appResourceUsageBuilder_ == null) {
          appResourceUsage_ = builderForValue.build();
        } else {
          appResourceUsageBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public Builder mergeAppResourceUsage(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto value) {
        if (appResourceUsageBuilder_ == null) {
          if (((bitField0_ & 0x00002000) != 0) &&
            appResourceUsage_ != null &&
            appResourceUsage_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance()) {
            getAppResourceUsageBuilder().mergeFrom(value);
          } else {
            appResourceUsage_ = value;
          }
        } else {
          appResourceUsageBuilder_.mergeFrom(value);
        }
        if (appResourceUsage_ != null) {
          bitField0_ |= 0x00002000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public Builder clearAppResourceUsage() {
        bitField0_ = (bitField0_ & ~0x00002000);
        appResourceUsage_ = null;
        if (appResourceUsageBuilder_ != null) {
          appResourceUsageBuilder_.dispose();
          appResourceUsageBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder getAppResourceUsageBuilder() {
        bitField0_ |= 0x00002000;
        onChanged();
        return getAppResourceUsageFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder getAppResourceUsageOrBuilder() {
        if (appResourceUsageBuilder_ != null) {
          return appResourceUsageBuilder_.getMessageOrBuilder();
        } else {
          return appResourceUsage_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.getDefaultInstance() : appResourceUsage_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationResourceUsageReportProto app_resource_Usage = 14;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder> 
          getAppResourceUsageFieldBuilder() {
        if (appResourceUsageBuilder_ == null) {
          appResourceUsageBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationResourceUsageReportProtoOrBuilder>(
                  getAppResourceUsage(),
                  getParentForChildren(),
                  isClean());
          appResourceUsage_ = null;
        }
        return appResourceUsageBuilder_;
      }

      private java.lang.Object originalTrackingUrl_ = "";
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @return Whether the originalTrackingUrl field is set.
       */
      public boolean hasOriginalTrackingUrl() {
        return ((bitField0_ & 0x00004000) != 0);
      }
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @return The originalTrackingUrl.
       */
      public java.lang.String getOriginalTrackingUrl() {
        java.lang.Object ref = originalTrackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            originalTrackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @return The bytes for originalTrackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getOriginalTrackingUrlBytes() {
        java.lang.Object ref = originalTrackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          originalTrackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @param value The originalTrackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setOriginalTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        originalTrackingUrl_ = value;
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @return This builder for chaining.
       */
      public Builder clearOriginalTrackingUrl() {
        originalTrackingUrl_ = getDefaultInstance().getOriginalTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00004000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string originalTrackingUrl = 15;</code>
       * @param value The bytes for originalTrackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setOriginalTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        originalTrackingUrl_ = value;
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto currentApplicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> currentApplicationAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       * @return Whether the currentApplicationAttemptId field is set.
       */
      public boolean hasCurrentApplicationAttemptId() {
        return ((bitField0_ & 0x00008000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       * @return The currentApplicationAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getCurrentApplicationAttemptId() {
        if (currentApplicationAttemptIdBuilder_ == null) {
          return currentApplicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_;
        } else {
          return currentApplicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public Builder setCurrentApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (currentApplicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          currentApplicationAttemptId_ = value;
        } else {
          currentApplicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public Builder setCurrentApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (currentApplicationAttemptIdBuilder_ == null) {
          currentApplicationAttemptId_ = builderForValue.build();
        } else {
          currentApplicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public Builder mergeCurrentApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (currentApplicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00008000) != 0) &&
            currentApplicationAttemptId_ != null &&
            currentApplicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getCurrentApplicationAttemptIdBuilder().mergeFrom(value);
          } else {
            currentApplicationAttemptId_ = value;
          }
        } else {
          currentApplicationAttemptIdBuilder_.mergeFrom(value);
        }
        if (currentApplicationAttemptId_ != null) {
          bitField0_ |= 0x00008000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public Builder clearCurrentApplicationAttemptId() {
        bitField0_ = (bitField0_ & ~0x00008000);
        currentApplicationAttemptId_ = null;
        if (currentApplicationAttemptIdBuilder_ != null) {
          currentApplicationAttemptIdBuilder_.dispose();
          currentApplicationAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getCurrentApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00008000;
        onChanged();
        return getCurrentApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getCurrentApplicationAttemptIdOrBuilder() {
        if (currentApplicationAttemptIdBuilder_ != null) {
          return currentApplicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return currentApplicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : currentApplicationAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto currentApplicationAttemptId = 16;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getCurrentApplicationAttemptIdFieldBuilder() {
        if (currentApplicationAttemptIdBuilder_ == null) {
          currentApplicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getCurrentApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          currentApplicationAttemptId_ = null;
        }
        return currentApplicationAttemptIdBuilder_;
      }

      private float progress_ ;
      /**
       * <code>optional float progress = 17;</code>
       * @return Whether the progress field is set.
       */
      @java.lang.Override
      public boolean hasProgress() {
        return ((bitField0_ & 0x00010000) != 0);
      }
      /**
       * <code>optional float progress = 17;</code>
       * @return The progress.
       */
      @java.lang.Override
      public float getProgress() {
        return progress_;
      }
      /**
       * <code>optional float progress = 17;</code>
       * @param value The progress to set.
       * @return This builder for chaining.
       */
      public Builder setProgress(float value) {
        progress_ = value;
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }
      /**
       * <code>optional float progress = 17;</code>
       * @return This builder for chaining.
       */
      public Builder clearProgress() {
        bitField0_ = (bitField0_ & ~0x00010000);
        progress_ = 0F;
        onChanged();
        return this;
      }
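
      // Editor's note (illustrative comment, not protoc output): each optional
      // field above owns one bit of bitField0_ in declaration order (queue ->
      // 0x04, name -> 0x08, ..., progress -> 0x10000), which is how presence
      // is tracked without boxing primitives. Judging from the field set
      // (queue, host, trackingUrl, am_rm_token, applicationTags, ...), this
      // appears to be the Builder of ApplicationReportProto, though that is
      // inferred from context rather than stated in this excerpt.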

      private java.lang.Object applicationType_ = "";
      /**
       * <code>optional string applicationType = 18;</code>
       * @return Whether the applicationType field is set.
       */
      public boolean hasApplicationType() {
        return ((bitField0_ & 0x00020000) != 0);
      }
      /**
       * <code>optional string applicationType = 18;</code>
       * @return The applicationType.
       */
      public java.lang.String getApplicationType() {
        java.lang.Object ref = applicationType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string applicationType = 18;</code>
       * @return The bytes for applicationType.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTypeBytes() {
        java.lang.Object ref = applicationType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string applicationType = 18;</code>
       * @param value The applicationType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationType(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        applicationType_ = value;
        bitField0_ |= 0x00020000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string applicationType = 18;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationType() {
        applicationType_ = getDefaultInstance().getApplicationType();
        bitField0_ = (bitField0_ & ~0x00020000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string applicationType = 18;</code>
       * @param value The bytes for applicationType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        applicationType_ = value;
        bitField0_ |= 0x00020000;
        onChanged();
        return this;
      }

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto amRmToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> amRmTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       * @return Whether the amRmToken field is set.
       */
      public boolean hasAmRmToken() {
        return ((bitField0_ & 0x00040000) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       * @return The amRmToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getAmRmToken() {
        if (amRmTokenBuilder_ == null) {
          return amRmToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
        } else {
          return amRmTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public Builder setAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (amRmTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          amRmToken_ = value;
        } else {
          amRmTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00040000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public Builder setAmRmToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (amRmTokenBuilder_ == null) {
          amRmToken_ = builderForValue.build();
        } else {
          amRmTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00040000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public Builder mergeAmRmToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (amRmTokenBuilder_ == null) {
          if (((bitField0_ & 0x00040000) != 0) &&
            amRmToken_ != null &&
            amRmToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getAmRmTokenBuilder().mergeFrom(value);
          } else {
            amRmToken_ = value;
          }
        } else {
          amRmTokenBuilder_.mergeFrom(value);
        }
        if (amRmToken_ != null) {
          bitField0_ |= 0x00040000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public Builder clearAmRmToken() {
        bitField0_ = (bitField0_ & ~0x00040000);
        amRmToken_ = null;
        if (amRmTokenBuilder_ != null) {
          amRmTokenBuilder_.dispose();
          amRmTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getAmRmTokenBuilder() {
        bitField0_ |= 0x00040000;
        onChanged();
        return getAmRmTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getAmRmTokenOrBuilder() {
        if (amRmTokenBuilder_ != null) {
          return amRmTokenBuilder_.getMessageOrBuilder();
        } else {
          return amRmToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : amRmToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto am_rm_token = 19;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getAmRmTokenFieldBuilder() {
        if (amRmTokenBuilder_ == null) {
          amRmTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getAmRmToken(),
                  getParentForChildren(),
                  isClean());
          amRmToken_ = null;
        }
        return amRmTokenBuilder_;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureApplicationTagsIsMutable() {
        if (!applicationTags_.isModifiable()) {
          applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_);
        }
        bitField0_ |= 0x00080000;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @return A list containing the applicationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getApplicationTagsList() {
        applicationTags_.makeImmutable();
        return applicationTags_;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @return The count of applicationTags.
       */
      public int getApplicationTagsCount() {
        return applicationTags_.size();
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param index The index of the element to return.
       * @return The applicationTags at the given index.
       */
      public java.lang.String getApplicationTags(int index) {
        return applicationTags_.get(index);
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param index The index of the value to return.
       * @return The bytes of the applicationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTagsBytes(int index) {
        return applicationTags_.getByteString(index);
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param index The index to set the value at.
       * @param value The applicationTags to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.set(index, value);
        bitField0_ |= 0x00080000;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param value The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00080000;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param values The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureApplicationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, applicationTags_);
        bitField0_ |= 0x00080000;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTags() {
        applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00080000);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 20;</code>
       * @param value The bytes of the applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00080000;
        onChanged();
        return this;
      }
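
      // Editor's note (illustrative comment, not protoc output): the repeated
      // applicationTags field uses a copy-on-write LazyStringArrayList.
      // getApplicationTagsList() freezes the current list before returning
      // it, and ensureApplicationTagsIsMutable() copies it again only when it
      // is no longer modifiable, so returned lists are safe to retain. A
      // hedged sketch with `b` as a Builder instance:
      //
      //   b.addApplicationTags("prod");
      //   ProtocolStringList tags = b.getApplicationTagsList(); // frozen view
      //   b.addApplicationTags("batch");   // copies internally before adding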

      private int logAggregationStatus_ = 1;
      /**
       * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
       * @return Whether the logAggregationStatus field is set.
       */
      @java.lang.Override public boolean hasLogAggregationStatus() {
        return ((bitField0_ & 0x00100000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
       * @return The logAggregationStatus.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto getLogAggregationStatus() {
        org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto result = org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.forNumber(logAggregationStatus_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto.LOG_DISABLED : result;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
       * @param value The logAggregationStatus to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationStatus(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationStatusProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00100000;
        logAggregationStatus_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationStatusProto log_aggregation_status = 21;</code>
       * @return This builder for chaining.
       */
      public Builder clearLogAggregationStatus() {
        bitField0_ = (bitField0_ & ~0x00100000);
        logAggregationStatus_ = 1;
        onChanged();
        return this;
      }

      private boolean unmanagedApplication_ ;
      /**
       * <code>optional bool unmanaged_application = 22 [default = false];</code>
       * @return Whether the unmanagedApplication field is set.
       */
      @java.lang.Override
      public boolean hasUnmanagedApplication() {
        return ((bitField0_ & 0x00200000) != 0);
      }
      /**
       * <code>optional bool unmanaged_application = 22 [default = false];</code>
       * @return The unmanagedApplication.
       */
      @java.lang.Override
      public boolean getUnmanagedApplication() {
        return unmanagedApplication_;
      }
      /**
       * <code>optional bool unmanaged_application = 22 [default = false];</code>
       * @param value The unmanagedApplication to set.
       * @return This builder for chaining.
       */
      public Builder setUnmanagedApplication(boolean value) {
        unmanagedApplication_ = value;
        bitField0_ |= 0x00200000;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool unmanaged_application = 22 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearUnmanagedApplication() {
        bitField0_ = (bitField0_ & ~0x00200000);
        unmanagedApplication_ = false;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00400000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00400000) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00400000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00400000);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00400000;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 23;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }

      private java.lang.Object appNodeLabelExpression_ = "";
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @return Whether the appNodeLabelExpression field is set.
       */
      public boolean hasAppNodeLabelExpression() {
        return ((bitField0_ & 0x00800000) != 0);
      }
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @return The appNodeLabelExpression.
       */
      public java.lang.String getAppNodeLabelExpression() {
        java.lang.Object ref = appNodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            appNodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @return The bytes for appNodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAppNodeLabelExpressionBytes() {
        java.lang.Object ref = appNodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          appNodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @param value The appNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAppNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        appNodeLabelExpression_ = value;
        bitField0_ |= 0x00800000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @return This builder for chaining.
       */
      public Builder clearAppNodeLabelExpression() {
        appNodeLabelExpression_ = getDefaultInstance().getAppNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x00800000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string appNodeLabelExpression = 24;</code>
       * @param value The bytes for appNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAppNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        appNodeLabelExpression_ = value;
        bitField0_ |= 0x00800000;
        onChanged();
        return this;
      }

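      // Note on the Object-typed string fields above: the field holds either a
      // java.lang.String or a ByteString. After parsing it starts out as raw
      // bytes, and the first getter call decodes UTF-8, caching the String only
      // when the bytes are valid UTF-8. A minimal sketch:
      //
      //   ApplicationReportProto.Builder b = ApplicationReportProto.newBuilder();
      //   b.setAppNodeLabelExpression("gpu");                   // stores the String
      //   ByteString raw = b.getAppNodeLabelExpressionBytes();  // caches the bytes
      //
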
      private java.lang.Object amNodeLabelExpression_ = "";
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @return Whether the amNodeLabelExpression field is set.
       */
      public boolean hasAmNodeLabelExpression() {
        return ((bitField0_ & 0x01000000) != 0);
      }
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @return The amNodeLabelExpression.
       */
      public java.lang.String getAmNodeLabelExpression() {
        java.lang.Object ref = amNodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            amNodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @return The bytes for amNodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAmNodeLabelExpressionBytes() {
        java.lang.Object ref = amNodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          amNodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @param value The amNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAmNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        amNodeLabelExpression_ = value;
        bitField0_ |= 0x01000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @return This builder for chaining.
       */
      public Builder clearAmNodeLabelExpression() {
        amNodeLabelExpression_ = getDefaultInstance().getAmNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x01000000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string amNodeLabelExpression = 25;</code>
       * @param value The bytes for amNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setAmNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        amNodeLabelExpression_ = value;
        bitField0_ |= 0x01000000;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> appTimeouts_ =
        java.util.Collections.emptyList();
      private void ensureAppTimeoutsIsMutable() {
        if (!((bitField0_ & 0x02000000) != 0)) {
          appTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto>(appTimeouts_);
          bitField0_ |= 0x02000000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> appTimeoutsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> getAppTimeoutsList() {
        if (appTimeoutsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(appTimeouts_);
        } else {
          return appTimeoutsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public int getAppTimeoutsCount() {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.size();
        } else {
          return appTimeoutsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getAppTimeouts(int index) {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.get(index);
        } else {
          return appTimeoutsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder setAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.set(index, value);
          onChanged();
        } else {
          appTimeoutsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder setAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.set(index, builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder addAppTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(value);
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder addAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto value) {
        if (appTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(index, value);
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder addAppTimeouts(
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder addAppTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder builderForValue) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.add(index, builderForValue.build());
          onChanged();
        } else {
          appTimeoutsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder addAllAppTimeouts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto> values) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, appTimeouts_);
          onChanged();
        } else {
          appTimeoutsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder clearAppTimeouts() {
        if (appTimeoutsBuilder_ == null) {
          appTimeouts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x02000000);
          onChanged();
        } else {
          appTimeoutsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public Builder removeAppTimeouts(int index) {
        if (appTimeoutsBuilder_ == null) {
          ensureAppTimeoutsIsMutable();
          appTimeouts_.remove(index);
          onChanged();
        } else {
          appTimeoutsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder getAppTimeoutsBuilder(
          int index) {
        return getAppTimeoutsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder getAppTimeoutsOrBuilder(
          int index) {
        if (appTimeoutsBuilder_ == null) {
          return appTimeouts_.get(index);
        } else {
          return appTimeoutsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> 
           getAppTimeoutsOrBuilderList() {
        if (appTimeoutsBuilder_ != null) {
          return appTimeoutsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(appTimeouts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder addAppTimeoutsBuilder() {
        return getAppTimeoutsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder addAppTimeoutsBuilder(
          int index) {
        return getAppTimeoutsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.AppTimeoutsMapProto appTimeouts = 26;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder> 
           getAppTimeoutsBuilderList() {
        return getAppTimeoutsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder> 
          getAppTimeoutsFieldBuilder() {
        if (appTimeoutsBuilder_ == null) {
          appTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder>(
                  appTimeouts_,
                  ((bitField0_ & 0x02000000) != 0),
                  getParentForChildren(),
                  isClean());
          appTimeouts_ = null;
        }
        return appTimeoutsBuilder_;
      }

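      // Usage sketch (illustrative): the repeated appTimeouts field accepts
      // either built messages or nested builders, which RepeatedFieldBuilderV3
      // keeps in sync with this parent builder:
      //
      //   ApplicationReportProto.Builder b = ApplicationReportProto.newBuilder();
      //   b.addAppTimeoutsBuilder()
      //       .setApplicationTimeoutType(ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME);
      //   int n = b.getAppTimeoutsCount();  // == 1
      //
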
      private long launchTime_;
      /**
       * <code>optional int64 launchTime = 27;</code>
       * @return Whether the launchTime field is set.
       */
      @java.lang.Override
      public boolean hasLaunchTime() {
        return ((bitField0_ & 0x04000000) != 0);
      }
      /**
       * <code>optional int64 launchTime = 27;</code>
       * @return The launchTime.
       */
      @java.lang.Override
      public long getLaunchTime() {
        return launchTime_;
      }
      /**
       * <code>optional int64 launchTime = 27;</code>
       * @param value The launchTime to set.
       * @return This builder for chaining.
       */
      public Builder setLaunchTime(long value) {
        launchTime_ = value;
        bitField0_ |= 0x04000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 launchTime = 27;</code>
       * @return This builder for chaining.
       */
      public Builder clearLaunchTime() {
        bitField0_ = (bitField0_ & ~0x04000000);
        launchTime_ = 0L;
        onChanged();
        return this;
      }

      private long submitTime_;
      /**
       * <code>optional int64 submitTime = 28;</code>
       * @return Whether the submitTime field is set.
       */
      @java.lang.Override
      public boolean hasSubmitTime() {
        return ((bitField0_ & 0x08000000) != 0);
      }
      /**
       * <code>optional int64 submitTime = 28;</code>
       * @return The submitTime.
       */
      @java.lang.Override
      public long getSubmitTime() {
        return submitTime_;
      }
      /**
       * <code>optional int64 submitTime = 28;</code>
       * @param value The submitTime to set.
       * @return This builder for chaining.
       */
      public Builder setSubmitTime(long value) {
        submitTime_ = value;
        bitField0_ |= 0x08000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 submitTime = 28;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubmitTime() {
        bitField0_ = (bitField0_ & ~0x08000000);
        submitTime_ = 0L;
        onChanged();
        return this;
      }

      private java.lang.Object rmClusterId_ = "";
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @return Whether the rmClusterId field is set.
       */
      public boolean hasRmClusterId() {
        return ((bitField0_ & 0x10000000) != 0);
      }
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @return The rmClusterId.
       */
      public java.lang.String getRmClusterId() {
        java.lang.Object ref = rmClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            rmClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @return The bytes for rmClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRmClusterIdBytes() {
        java.lang.Object ref = rmClusterId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rmClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @param value The rmClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setRmClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        rmClusterId_ = value;
        bitField0_ |= 0x10000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @return This builder for chaining.
       */
      public Builder clearRmClusterId() {
        rmClusterId_ = getDefaultInstance().getRmClusterId();
        bitField0_ = (bitField0_ & ~0x10000000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string rmClusterId = 29;</code>
       * @param value The bytes for rmClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setRmClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        rmClusterId_ = value;
        bitField0_ |= 0x10000000;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationReportProto>() {
      @java.lang.Override
      public ApplicationReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

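  // Round-trip sketch (illustrative, assuming an existing instance `report`):
  // like every message in this file, ApplicationReportProto can be re-read
  // from its serialized form; required sub-fields are verified by
  // isInitialized(), while parsePartialFrom above tolerates their absence:
  //
  //   byte[] bytes = report.toByteArray();
  //   YarnProtos.ApplicationReportProto copy =
  //       YarnProtos.ApplicationReportProto.parseFrom(bytes);
  //
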
  public interface AppTimeoutsMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AppTimeoutsMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     * @return Whether the applicationTimeout field is set.
     */
    boolean hasApplicationTimeout();
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     * @return The applicationTimeout.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout();
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.AppTimeoutsMapProto}
   */
  public static final class AppTimeoutsMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AppTimeoutsMapProto)
      AppTimeoutsMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AppTimeoutsMapProto.newBuilder() to construct.
    private AppTimeoutsMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private AppTimeoutsMapProto() {
      applicationTimeoutType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new AppTimeoutsMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
    }

    public static final int APPLICATION_TIMEOUT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto applicationTimeout_;
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     * @return Whether the applicationTimeout field is set.
     */
    @java.lang.Override
    public boolean hasApplicationTimeout() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     * @return The applicationTimeout.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout() {
      return applicationTimeout_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance() : applicationTimeout_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder() {
      return applicationTimeout_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance() : applicationTimeout_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasApplicationTimeout()) {
        if (!getApplicationTimeout().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getApplicationTimeout());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getApplicationTimeout());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto) obj;

      if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
      if (hasApplicationTimeoutType()) {
        if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
      }
      if (hasApplicationTimeout() != other.hasApplicationTimeout()) return false;
      if (hasApplicationTimeout()) {
        if (!getApplicationTimeout()
            .equals(other.getApplicationTimeout())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationTimeoutType()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + applicationTimeoutType_;
      }
      if (hasApplicationTimeout()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTimeout().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

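    // Usage sketch (illustrative): parseDelimitedFrom pairs with the
    // writeDelimitedTo method inherited from MessageLite, letting several
    // messages share one length-prefixed stream:
    //
    //   msg.writeDelimitedTo(out);                          // assumed OutputStream out
    //   AppTimeoutsMapProto next = parseDelimitedFrom(in);  // assumed InputStream in
    //
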
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.AppTimeoutsMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.AppTimeoutsMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationTimeoutFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTimeoutType_ = 1;
        applicationTimeout_ = null;
        if (applicationTimeoutBuilder_ != null) {
          applicationTimeoutBuilder_.dispose();
          applicationTimeoutBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.applicationTimeout_ = applicationTimeoutBuilder_ == null
              ? applicationTimeout_
              : applicationTimeoutBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasApplicationTimeout()) {
          mergeApplicationTimeout(other.getApplicationTimeout());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasApplicationTimeout()) {
          if (!getApplicationTimeout().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getApplicationTimeoutFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
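
      // Wire-format note: each case label in mergeFrom above is the raw tag,
      // computed as (field_number << 3) | wire_type. Field 1 as a varint enum
      // gives (1 << 3) | 0 = 8, and field 2 as a length-delimited message
      // gives (2 << 3) | 2 = 18. Enum numbers that forNumber() does not
      // recognize are preserved via mergeUnknownVarintField rather than dropped.
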
      private int bitField0_;

      private int applicationTimeoutType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return Whether the applicationTimeoutType field is set.
       */
      @java.lang.Override public boolean hasApplicationTimeoutType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return The applicationTimeoutType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @param value The applicationTimeoutType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTimeoutType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        applicationTimeoutType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTimeoutType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationTimeoutType_ = 1;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto applicationTimeout_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder> applicationTimeoutBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       * @return Whether the applicationTimeout field is set.
       */
      public boolean hasApplicationTimeout() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       * @return The applicationTimeout.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getApplicationTimeout() {
        if (applicationTimeoutBuilder_ == null) {
          return applicationTimeout_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance() : applicationTimeout_;
        } else {
          return applicationTimeoutBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public Builder setApplicationTimeout(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto value) {
        if (applicationTimeoutBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationTimeout_ = value;
        } else {
          applicationTimeoutBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public Builder setApplicationTimeout(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder builderForValue) {
        if (applicationTimeoutBuilder_ == null) {
          applicationTimeout_ = builderForValue.build();
        } else {
          applicationTimeoutBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public Builder mergeApplicationTimeout(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto value) {
        if (applicationTimeoutBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            applicationTimeout_ != null &&
            applicationTimeout_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()) {
            getApplicationTimeoutBuilder().mergeFrom(value);
          } else {
            applicationTimeout_ = value;
          }
        } else {
          applicationTimeoutBuilder_.mergeFrom(value);
        }
        if (applicationTimeout_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public Builder clearApplicationTimeout() {
        bitField0_ = (bitField0_ & ~0x00000002);
        applicationTimeout_ = null;
        if (applicationTimeoutBuilder_ != null) {
          applicationTimeoutBuilder_.dispose();
          applicationTimeoutBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder getApplicationTimeoutBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getApplicationTimeoutFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder getApplicationTimeoutOrBuilder() {
        if (applicationTimeoutBuilder_ != null) {
          return applicationTimeoutBuilder_.getMessageOrBuilder();
        } else {
          return applicationTimeout_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance() : applicationTimeout_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutProto application_timeout = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder> 
          getApplicationTimeoutFieldBuilder() {
        if (applicationTimeoutBuilder_ == null) {
          applicationTimeoutBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder>(
                  getApplicationTimeout(),
                  getParentForChildren(),
                  isClean());
          applicationTimeout_ = null;
        }
        return applicationTimeoutBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.AppTimeoutsMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.AppTimeoutsMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AppTimeoutsMapProto>() {
      @java.lang.Override
      public AppTimeoutsMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AppTimeoutsMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AppTimeoutsMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

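  // Construction sketch (illustrative): AppTimeoutsMapProto pairs a timeout
  // type with an ApplicationTimeoutProto entry. The nested message declares
  // application_timeout_type as required, so build() fails unless it is set:
  //
  //   YarnProtos.AppTimeoutsMapProto entry = YarnProtos.AppTimeoutsMapProto.newBuilder()
  //       .setApplicationTimeoutType(
  //           YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
  //       .setApplicationTimeout(YarnProtos.ApplicationTimeoutProto.newBuilder()
  //           .setApplicationTimeoutType(
  //               YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
  //           .setRemainingTime(60L))  // setter assumed from the generated pattern
  //       .build();
  //
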
  public interface ApplicationTimeoutProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationTimeoutProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * <code>optional string expire_time = 2;</code>
     * @return Whether the expireTime field is set.
     */
    boolean hasExpireTime();
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The expireTime.
     */
    java.lang.String getExpireTime();
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The bytes for expireTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExpireTimeBytes();

    /**
     * <code>optional int64 remaining_time = 3;</code>
     * @return Whether the remainingTime field is set.
     */
    boolean hasRemainingTime();
    /**
     * <code>optional int64 remaining_time = 3;</code>
     * @return The remainingTime.
     */
    long getRemainingTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationTimeoutProto}
   */
  public static final class ApplicationTimeoutProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationTimeoutProto)
      ApplicationTimeoutProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationTimeoutProto.newBuilder() to construct.
    private ApplicationTimeoutProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationTimeoutProto() {
      applicationTimeoutType_ = 1;
      expireTime_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationTimeoutProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder.class);
    }

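    // bitField0_ records explicit presence: bit 0x1 is the required
    // application_timeout_type, 0x2 is expire_time, 0x4 is remaining_time,
    // mirroring the has*() accessors below.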
    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
    }

    public static final int EXPIRE_TIME_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object expireTime_ = "";
    /**
     * <code>optional string expire_time = 2;</code>
     * @return Whether the expireTime field is set.
     */
    @java.lang.Override
    public boolean hasExpireTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The expireTime.
     */
    @java.lang.Override
    public java.lang.String getExpireTime() {
      java.lang.Object ref = expireTime_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
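        // The field still holds the ByteString captured during parsing;
        // decode it once and cache the String form if it is valid UTF-8.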
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          expireTime_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The bytes for expireTime.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getExpireTimeBytes() {
      java.lang.Object ref = expireTime_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        expireTime_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int REMAINING_TIME_FIELD_NUMBER = 3;
    private long remainingTime_ = 0L;
    /**
     * <code>optional int64 remaining_time = 3;</code>
     * @return Whether the remainingTime field is set.
     */
    @java.lang.Override
    public boolean hasRemainingTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 remaining_time = 3;</code>
     * @return The remainingTime.
     */
    @java.lang.Override
    public long getRemainingTime() {
      return remainingTime_;
    }

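    // Memoized required-field check: -1 = not yet computed,
    // 0 = a required field is missing, 1 = fully initialized.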
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasApplicationTimeoutType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, expireTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, remainingTime_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, expireTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, remainingTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto) obj;

      if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
      if (hasApplicationTimeoutType()) {
        if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
      }
      if (hasExpireTime() != other.hasExpireTime()) return false;
      if (hasExpireTime()) {
        if (!getExpireTime()
            .equals(other.getExpireTime())) return false;
      }
      if (hasRemainingTime() != other.hasRemainingTime()) return false;
      if (hasRemainingTime()) {
        if (getRemainingTime()
            != other.getRemainingTime()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
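      // protoc's standard scheme: fold each present field's number and value
      // into the hash with 37/53 multipliers, then memoize the result.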
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationTimeoutType()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + applicationTimeoutType_;
      }
      if (hasExpireTime()) {
        hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER;
        hash = (53 * hash) + getExpireTime().hashCode();
      }
      if (hasRemainingTime()) {
        hash = (37 * hash) + REMAINING_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getRemainingTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
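
    // Delimited I/O sketch (illustrative; "timeout", "out" and "in" are
    // hypothetical caller-side variables): writeDelimitedTo() length-prefixes
    // each message so several can share one stream, and parseDelimitedFrom()
    // returns null once the stream is exhausted.
    //
    //   timeout.writeDelimitedTo(out);
    //   YarnProtos.ApplicationTimeoutProto next =
    //       YarnProtos.ApplicationTimeoutProto.parseDelimitedFrom(in);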
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationTimeoutProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationTimeoutProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTimeoutType_ = 1;
        expireTime_ = "";
        remainingTime_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.expireTime_ = expireTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.remainingTime_ = remainingTime_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasExpireTime()) {
          expireTime_ = other.expireTime_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasRemainingTime()) {
          setRemainingTime(other.getRemainingTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasApplicationTimeoutType()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
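            // tag == (field number << 3) | wire type: 8 is field 1 as a
            // varint (the enum), 18 is field 2 length-delimited (the string),
            // 24 is field 3 as a varint (the int64).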
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                expireTime_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                remainingTime_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int applicationTimeoutType_ = 1;
      /**
       * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return Whether the applicationTimeoutType field is set.
       */
      @java.lang.Override public boolean hasApplicationTimeoutType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return The applicationTimeoutType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @param value The applicationTimeoutType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTimeoutType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        applicationTimeoutType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTimeoutType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationTimeoutType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object expireTime_ = "";
      /**
       * <code>optional string expire_time = 2;</code>
       * @return Whether the expireTime field is set.
       */
      public boolean hasExpireTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return The expireTime.
       */
      public java.lang.String getExpireTime() {
        java.lang.Object ref = expireTime_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            expireTime_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return The bytes for expireTime.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExpireTimeBytes() {
        java.lang.Object ref = expireTime_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          expireTime_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @param value The expireTime to set.
       * @return This builder for chaining.
       */
      public Builder setExpireTime(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        expireTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearExpireTime() {
        expireTime_ = getDefaultInstance().getExpireTime();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @param value The bytes for expireTime to set.
       * @return This builder for chaining.
       */
      public Builder setExpireTimeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        expireTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private long remainingTime_;
      /**
       * <code>optional int64 remaining_time = 3;</code>
       * @return Whether the remainingTime field is set.
       */
      @java.lang.Override
      public boolean hasRemainingTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 remaining_time = 3;</code>
       * @return The remainingTime.
       */
      @java.lang.Override
      public long getRemainingTime() {
        return remainingTime_;
      }
      /**
       * <code>optional int64 remaining_time = 3;</code>
       * @param value The remainingTime to set.
       * @return This builder for chaining.
       */
      public Builder setRemainingTime(long value) {
        remainingTime_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 remaining_time = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearRemainingTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        remainingTime_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationTimeoutProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationTimeoutProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

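    // The public PARSER field is deprecated for external use; prefer the
    // parser() accessor below, which returns the same singleton.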
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationTimeoutProto>() {
      @java.lang.Override
      public ApplicationTimeoutProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ApplicationAttemptReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationAttemptReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    boolean hasApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId();
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder();

    /**
     * <code>optional string host = 2;</code>
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * <code>optional string host = 2;</code>
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 2;</code>
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 rpc_port = 3;</code>
     * @return Whether the rpcPort field is set.
     */
    boolean hasRpcPort();
    /**
     * <code>optional int32 rpc_port = 3;</code>
     * @return The rpcPort.
     */
    int getRpcPort();

    /**
     * <code>optional string tracking_url = 4;</code>
     * @return Whether the trackingUrl field is set.
     */
    boolean hasTrackingUrl();
    /**
     * <code>optional string tracking_url = 4;</code>
     * @return The trackingUrl.
     */
    java.lang.String getTrackingUrl();
    /**
     * <code>optional string tracking_url = 4;</code>
     * @return The bytes for trackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes();

    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();

    /**
     * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
     * @return Whether the yarnApplicationAttemptState field is set.
     */
    boolean hasYarnApplicationAttemptState();
    /**
     * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
     * @return The yarnApplicationAttemptState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState();

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     * @return Whether the amContainerId field is set.
     */
    boolean hasAmContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     * @return The amContainerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder();

    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return Whether the originalTrackingUrl field is set.
     */
    boolean hasOriginalTrackingUrl();
    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return The originalTrackingUrl.
     */
    java.lang.String getOriginalTrackingUrl();
    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return The bytes for originalTrackingUrl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getOriginalTrackingUrlBytes();

    /**
     * <code>optional int64 startTime = 9;</code>
     * @return Whether the startTime field is set.
     */
    boolean hasStartTime();
    /**
     * <code>optional int64 startTime = 9;</code>
     * @return The startTime.
     */
    long getStartTime();

    /**
     * <code>optional int64 finishTime = 10;</code>
     * @return Whether the finishTime field is set.
     */
    boolean hasFinishTime();
    /**
     * <code>optional int64 finishTime = 10;</code>
     * @return The finishTime.
     */
    long getFinishTime();
  }
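
  // Usage sketch (illustrative; the host, port and URL values are made up):
  // all fields here are optional, so build() always succeeds and the has*()
  // accessors distinguish "never set" from a field's default value.
  //
  //   YarnProtos.ApplicationAttemptReportProto report =
  //       YarnProtos.ApplicationAttemptReportProto.newBuilder()
  //           .setHost("nm-host.example.com")
  //           .setRpcPort(8042)
  //           .setTrackingUrl("http://rm-host:8088/proxy/app_1/")
  //           .build();
  //   report.hasDiagnostics();   // false: never set
  //   report.getDiagnostics();   // still returns the "N/A" default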
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationAttemptReportProto}
   */
  public static final class ApplicationAttemptReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationAttemptReportProto)
      ApplicationAttemptReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationAttemptReportProto.newBuilder() to construct.
    private ApplicationAttemptReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationAttemptReportProto() {
      host_ = "";
      trackingUrl_ = "";
      diagnostics_ = "N/A";
      yarnApplicationAttemptState_ = 1;
      originalTrackingUrl_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationAttemptReportProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ATTEMPT_ID_FIELD_NUMBER = 1;
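    // Message-typed fields are stored as plain references; null means unset,
    // and the getters below substitute the type's default instance.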
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return Whether the applicationAttemptId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationAttemptId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     * @return The applicationAttemptId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
      return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
    }

    public static final int HOST_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object host_ = "";
    /**
     * <code>optional string host = 2;</code>
     * @return Whether the host field is set.
     */
    @java.lang.Override
    public boolean hasHost() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string host = 2;</code>
     * @return The host.
     */
    @java.lang.Override
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string host = 2;</code>
     * @return The bytes for host.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RPC_PORT_FIELD_NUMBER = 3;
    private int rpcPort_ = 0;
    /**
     * <code>optional int32 rpc_port = 3;</code>
     * @return Whether the rpcPort field is set.
     */
    @java.lang.Override
    public boolean hasRpcPort() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 rpc_port = 3;</code>
     * @return The rpcPort.
     */
    @java.lang.Override
    public int getRpcPort() {
      return rpcPort_;
    }

    public static final int TRACKING_URL_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object trackingUrl_ = "";
    /**
     * <code>optional string tracking_url = 4;</code>
     * @return Whether the trackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasTrackingUrl() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string tracking_url = 4;</code>
     * @return The trackingUrl.
     */
    @java.lang.Override
    public java.lang.String getTrackingUrl() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          trackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string tracking_url = 4;</code>
     * @return The bytes for trackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTrackingUrlBytes() {
      java.lang.Object ref = trackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        trackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DIAGNOSTICS_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "N/A";
    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 5 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER = 6;
    private int yarnApplicationAttemptState_ = 1;
    /**
     * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
     * @return Whether the yarnApplicationAttemptState field is set.
     */
    @java.lang.Override public boolean hasYarnApplicationAttemptState() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
     * @return The yarnApplicationAttemptState.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
      org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
    }

    public static final int AM_CONTAINER_ID_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto amContainerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     * @return Whether the amContainerId field is set.
     */
    @java.lang.Override
    public boolean hasAmContainerId() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     * @return The amContainerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId() {
      return amContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder() {
      return amContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_;
    }

    public static final int ORIGINAL_TRACKING_URL_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private volatile java.lang.Object originalTrackingUrl_ = "";
    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return Whether the originalTrackingUrl field is set.
     */
    @java.lang.Override
    public boolean hasOriginalTrackingUrl() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return The originalTrackingUrl.
     */
    @java.lang.Override
    public java.lang.String getOriginalTrackingUrl() {
      java.lang.Object ref = originalTrackingUrl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          originalTrackingUrl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string original_tracking_url = 8;</code>
     * @return The bytes for originalTrackingUrl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getOriginalTrackingUrlBytes() {
      java.lang.Object ref = originalTrackingUrl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        originalTrackingUrl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int STARTTIME_FIELD_NUMBER = 9;
    private long startTime_ = 0L;
    /**
     * <code>optional int64 startTime = 9;</code>
     * @return Whether the startTime field is set.
     */
    @java.lang.Override
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional int64 startTime = 9;</code>
     * @return The startTime.
     */
    @java.lang.Override
    public long getStartTime() {
      return startTime_;
    }

    public static final int FINISHTIME_FIELD_NUMBER = 10;
    private long finishTime_ = 0L;
    /**
     * <code>optional int64 finishTime = 10;</code>
     * @return Whether the finishTime field is set.
     */
    @java.lang.Override
    public boolean hasFinishTime() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional int64 finishTime = 10;</code>
     * @return The finishTime.
     */
    @java.lang.Override
    public long getFinishTime() {
      return finishTime_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, trackingUrl_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, diagnostics_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeEnum(6, yarnApplicationAttemptState_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(7, getAmContainerId());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, originalTrackingUrl_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeInt64(9, startTime_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeInt64(10, finishTime_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationAttemptId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, host_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, rpcPort_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, trackingUrl_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, diagnostics_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(6, yarnApplicationAttemptState_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getAmContainerId());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, originalTrackingUrl_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(9, startTime_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(10, finishTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto) obj;

      if (hasApplicationAttemptId() != other.hasApplicationAttemptId()) return false;
      if (hasApplicationAttemptId()) {
        if (!getApplicationAttemptId()
            .equals(other.getApplicationAttemptId())) return false;
      }
      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasRpcPort() != other.hasRpcPort()) return false;
      if (hasRpcPort()) {
        if (getRpcPort()
            != other.getRpcPort()) return false;
      }
      if (hasTrackingUrl() != other.hasTrackingUrl()) return false;
      if (hasTrackingUrl()) {
        if (!getTrackingUrl()
            .equals(other.getTrackingUrl())) return false;
      }
      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (hasYarnApplicationAttemptState() != other.hasYarnApplicationAttemptState()) return false;
      if (hasYarnApplicationAttemptState()) {
        if (yarnApplicationAttemptState_ != other.yarnApplicationAttemptState_) return false;
      }
      if (hasAmContainerId() != other.hasAmContainerId()) return false;
      if (hasAmContainerId()) {
        if (!getAmContainerId()
            .equals(other.getAmContainerId())) return false;
      }
      if (hasOriginalTrackingUrl() != other.hasOriginalTrackingUrl()) return false;
      if (hasOriginalTrackingUrl()) {
        if (!getOriginalTrackingUrl()
            .equals(other.getOriginalTrackingUrl())) return false;
      }
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasFinishTime() != other.hasFinishTime()) return false;
      if (hasFinishTime()) {
        if (getFinishTime()
            != other.getFinishTime()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationAttemptId()) {
        hash = (37 * hash) + APPLICATION_ATTEMPT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationAttemptId().hashCode();
      }
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasRpcPort()) {
        hash = (37 * hash) + RPC_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getRpcPort();
      }
      if (hasTrackingUrl()) {
        hash = (37 * hash) + TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getTrackingUrl().hashCode();
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      if (hasYarnApplicationAttemptState()) {
        hash = (37 * hash) + YARN_APPLICATION_ATTEMPT_STATE_FIELD_NUMBER;
        hash = (53 * hash) + yarnApplicationAttemptState_;
      }
      if (hasAmContainerId()) {
        hash = (37 * hash) + AM_CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAmContainerId().hashCode();
      }
      if (hasOriginalTrackingUrl()) {
        hash = (37 * hash) + ORIGINAL_TRACKING_URL_FIELD_NUMBER;
        hash = (53 * hash) + getOriginalTrackingUrl().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + STARTTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasFinishTime()) {
        hash = (37 * hash) + FINISHTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFinishTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
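
    // All of the parseFrom overloads above delegate to the shared PARSER
    // singleton. The in-memory overloads (ByteBuffer, ByteString, byte[])
    // declare only InvalidProtocolBufferException, while the stream-based
    // overloads also surface java.io.IOException from the underlying input.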

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationAttemptReportProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationAttemptReportProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationAttemptIdFieldBuilder();
          getAmContainerIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        host_ = "";
        rpcPort_ = 0;
        trackingUrl_ = "";
        diagnostics_ = "N/A";
        yarnApplicationAttemptState_ = 1;
        amContainerId_ = null;
        if (amContainerIdBuilder_ != null) {
          amContainerIdBuilder_.dispose();
          amContainerIdBuilder_ = null;
        }
        originalTrackingUrl_ = "";
        startTime_ = 0L;
        finishTime_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationAttemptId_ = applicationAttemptIdBuilder_ == null
              ? applicationAttemptId_
              : applicationAttemptIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.host_ = host_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.rpcPort_ = rpcPort_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.trackingUrl_ = trackingUrl_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.yarnApplicationAttemptState_ = yarnApplicationAttemptState_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.amContainerId_ = amContainerIdBuilder_ == null
              ? amContainerId_
              : amContainerIdBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.originalTrackingUrl_ = originalTrackingUrl_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.finishTime_ = finishTime_;
          to_bitField0_ |= 0x00000200;
        }
        result.bitField0_ |= to_bitField0_;
      }
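
      // buildPartial0 copies only the fields whose presence bit is set in
      // bitField0_. Each optional field owns one bit, assigned in declaration
      // order: application_attempt_id is 0x00000001, host is 0x00000002,
      // rpc_port is 0x00000004, and so on up to finishTime at 0x00000200.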

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto.getDefaultInstance()) return this;
        if (other.hasApplicationAttemptId()) {
          mergeApplicationAttemptId(other.getApplicationAttemptId());
        }
        if (other.hasHost()) {
          host_ = other.host_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasRpcPort()) {
          setRpcPort(other.getRpcPort());
        }
        if (other.hasTrackingUrl()) {
          trackingUrl_ = other.trackingUrl_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (other.hasYarnApplicationAttemptState()) {
          setYarnApplicationAttemptState(other.getYarnApplicationAttemptState());
        }
        if (other.hasAmContainerId()) {
          mergeAmContainerId(other.getAmContainerId());
        }
        if (other.hasOriginalTrackingUrl()) {
          originalTrackingUrl_ = other.originalTrackingUrl_;
          bitField0_ |= 0x00000080;
          onChanged();
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasFinishTime()) {
          setFinishTime(other.getFinishTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationAttemptIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                host_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                rpcPort_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 34: {
                trackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(6, tmpRaw);
                } else {
                  yarnApplicationAttemptState_ = tmpRaw;
                  bitField0_ |= 0x00000020;
                }
                break;
              } // case 48
              case 58: {
                input.readMessage(
                    getAmContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 66: {
                originalTrackingUrl_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 72: {
                startTime_ = input.readInt64();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 80: {
                finishTime_ = input.readInt64();
                bitField0_ |= 0x00000200;
                break;
              } // case 80
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
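
      // The case labels in mergeFrom above are protobuf wire tags:
      // tag = (field_number << 3) | wire_type. For example, case 18 is
      // field 2 (host) with wire type 2 (length-delimited), and case 24 is
      // field 3 (rpc_port) with wire type 0 (varint). Case 0 means a clean
      // end of input, since no real field can encode to tag zero.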
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto applicationAttemptId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> applicationAttemptIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return Whether the applicationAttemptId field is set.
       */
      public boolean hasApplicationAttemptId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       * @return The applicationAttemptId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto getApplicationAttemptId() {
        if (applicationAttemptIdBuilder_ == null) {
          return applicationAttemptId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        } else {
          return applicationAttemptIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationAttemptId_ = value;
        } else {
          applicationAttemptIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder setApplicationAttemptId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder builderForValue) {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptId_ = builderForValue.build();
        } else {
          applicationAttemptIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder mergeApplicationAttemptId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto value) {
        if (applicationAttemptIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationAttemptId_ != null &&
            applicationAttemptId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance()) {
            getApplicationAttemptIdBuilder().mergeFrom(value);
          } else {
            applicationAttemptId_ = value;
          }
        } else {
          applicationAttemptIdBuilder_.mergeFrom(value);
        }
        if (applicationAttemptId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public Builder clearApplicationAttemptId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationAttemptId_ = null;
        if (applicationAttemptIdBuilder_ != null) {
          applicationAttemptIdBuilder_.dispose();
          applicationAttemptIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder getApplicationAttemptIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationAttemptIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder getApplicationAttemptIdOrBuilder() {
        if (applicationAttemptIdBuilder_ != null) {
          return applicationAttemptIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationAttemptId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.getDefaultInstance() : applicationAttemptId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAttemptIdProto application_attempt_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder> 
          getApplicationAttemptIdFieldBuilder() {
        if (applicationAttemptIdBuilder_ == null) {
          applicationAttemptIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProtoOrBuilder>(
                  getApplicationAttemptId(),
                  getParentForChildren(),
                  isClean());
          applicationAttemptId_ = null;
        }
        return applicationAttemptIdBuilder_;
      }
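
      // The SingleFieldBuilderV3 for application_attempt_id is created
      // lazily. Once it exists, applicationAttemptId_ is nulled out and the
      // nested builder becomes the single source of truth for the field,
      // which is why every accessor above branches on whether
      // applicationAttemptIdBuilder_ is null.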

      private java.lang.Object host_ = "";
      /**
       * <code>optional string host = 2;</code>
       * @return Whether the host field is set.
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string host = 2;</code>
       * @return The host.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string host = 2;</code>
       * @return The bytes for host.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
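
      // host_ is declared as java.lang.Object so it can hold either a String
      // or a ByteString. getHost() and getHostBytes() convert on first access
      // and cache the converted form back into host_ (the UTF-8 validity
      // check guards the String cache), so repeated reads avoid re-decoding.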
      /**
       * <code>optional string host = 2;</code>
       * @param value The host to set.
       * @return This builder for chaining.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearHost() {
        host_ = getDefaultInstance().getHost();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 2;</code>
       * @param value The bytes for host to set.
       * @return This builder for chaining.
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private int rpcPort_ ;
      /**
       * <code>optional int32 rpc_port = 3;</code>
       * @return Whether the rpcPort field is set.
       */
      @java.lang.Override
      public boolean hasRpcPort() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 rpc_port = 3;</code>
       * @return The rpcPort.
       */
      @java.lang.Override
      public int getRpcPort() {
        return rpcPort_;
      }
      /**
       * <code>optional int32 rpc_port = 3;</code>
       * @param value The rpcPort to set.
       * @return This builder for chaining.
       */
      public Builder setRpcPort(int value) {
        rpcPort_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 rpc_port = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearRpcPort() {
        bitField0_ = (bitField0_ & ~0x00000004);
        rpcPort_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object trackingUrl_ = "";
      /**
       * <code>optional string tracking_url = 4;</code>
       * @return Whether the trackingUrl field is set.
       */
      public boolean hasTrackingUrl() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string tracking_url = 4;</code>
       * @return The trackingUrl.
       */
      public java.lang.String getTrackingUrl() {
        java.lang.Object ref = trackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            trackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 4;</code>
       * @return The bytes for trackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTrackingUrlBytes() {
        java.lang.Object ref = trackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          trackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string tracking_url = 4;</code>
       * @param value The trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearTrackingUrl() {
        trackingUrl_ = getDefaultInstance().getTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string tracking_url = 4;</code>
       * @param value The bytes for trackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        trackingUrl_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object diagnostics_ = "N/A";
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 5 [default = "N/A"];</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private int yarnApplicationAttemptState_ = 1;
      /**
       * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
       * @return Whether the yarnApplicationAttemptState field is set.
       */
      @java.lang.Override public boolean hasYarnApplicationAttemptState() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
       * @return The yarnApplicationAttemptState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto getYarnApplicationAttemptState() {
        org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.forNumber(yarnApplicationAttemptState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto.APP_ATTEMPT_NEW : result;
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
       * @param value The yarnApplicationAttemptState to set.
       * @return This builder for chaining.
       */
      public Builder setYarnApplicationAttemptState(org.apache.hadoop.yarn.proto.YarnProtos.YarnApplicationAttemptStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        yarnApplicationAttemptState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.YarnApplicationAttemptStateProto yarn_application_attempt_state = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearYarnApplicationAttemptState() {
        bitField0_ = (bitField0_ & ~0x00000020);
        yarnApplicationAttemptState_ = 1;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto amContainerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> amContainerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       * @return Whether the amContainerId field is set.
       */
      public boolean hasAmContainerId() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       * @return The amContainerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getAmContainerId() {
        if (amContainerIdBuilder_ == null) {
          return amContainerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_;
        } else {
          return amContainerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public Builder setAmContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (amContainerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          amContainerId_ = value;
        } else {
          amContainerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public Builder setAmContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (amContainerIdBuilder_ == null) {
          amContainerId_ = builderForValue.build();
        } else {
          amContainerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public Builder mergeAmContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (amContainerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            amContainerId_ != null &&
            amContainerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getAmContainerIdBuilder().mergeFrom(value);
          } else {
            amContainerId_ = value;
          }
        } else {
          amContainerIdBuilder_.mergeFrom(value);
        }
        if (amContainerId_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public Builder clearAmContainerId() {
        bitField0_ = (bitField0_ & ~0x00000040);
        amContainerId_ = null;
        if (amContainerIdBuilder_ != null) {
          amContainerIdBuilder_.dispose();
          amContainerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getAmContainerIdBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getAmContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getAmContainerIdOrBuilder() {
        if (amContainerIdBuilder_ != null) {
          return amContainerIdBuilder_.getMessageOrBuilder();
        } else {
          return amContainerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : amContainerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto am_container_id = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getAmContainerIdFieldBuilder() {
        if (amContainerIdBuilder_ == null) {
          amContainerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getAmContainerId(),
                  getParentForChildren(),
                  isClean());
          amContainerId_ = null;
        }
        return amContainerIdBuilder_;
      }

      private java.lang.Object originalTrackingUrl_ = "";
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @return Whether the originalTrackingUrl field is set.
       */
      public boolean hasOriginalTrackingUrl() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @return The originalTrackingUrl.
       */
      public java.lang.String getOriginalTrackingUrl() {
        java.lang.Object ref = originalTrackingUrl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            originalTrackingUrl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @return The bytes for originalTrackingUrl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getOriginalTrackingUrlBytes() {
        java.lang.Object ref = originalTrackingUrl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          originalTrackingUrl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @param value The originalTrackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setOriginalTrackingUrl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        originalTrackingUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearOriginalTrackingUrl() {
        originalTrackingUrl_ = getDefaultInstance().getOriginalTrackingUrl();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>optional string original_tracking_url = 8;</code>
       * @param value The bytes for originalTrackingUrl to set.
       * @return This builder for chaining.
       */
      public Builder setOriginalTrackingUrlBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        originalTrackingUrl_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }

      private long startTime_ ;
      /**
       * <code>optional int64 startTime = 9;</code>
       * @return Whether the startTime field is set.
       */
      @java.lang.Override
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int64 startTime = 9;</code>
       * @return The startTime.
       */
      @java.lang.Override
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 startTime = 9;</code>
       * @param value The startTime to set.
       * @return This builder for chaining.
       */
      public Builder setStartTime(long value) {
        startTime_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 startTime = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000100);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long finishTime_ ;
      /**
       * <code>optional int64 finishTime = 10;</code>
       * @return Whether the finishTime field is set.
       */
      @java.lang.Override
      public boolean hasFinishTime() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional int64 finishTime = 10;</code>
       * @return The finishTime.
       */
      @java.lang.Override
      public long getFinishTime() {
        return finishTime_;
      }
      /**
       * <code>optional int64 finishTime = 10;</code>
       * @param value The finishTime to set.
       * @return This builder for chaining.
       */
      public Builder setFinishTime(long value) {
        finishTime_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 finishTime = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearFinishTime() {
        bitField0_ = (bitField0_ & ~0x00000200);
        finishTime_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationAttemptReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationAttemptReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationAttemptReportProto>() {
      @java.lang.Override
      public ApplicationAttemptReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
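
    // The public PARSER field is deprecated in favor of the parser()
    // accessor below; both return the same singleton. parsePartialFrom
    // funnels every failure into an InvalidProtocolBufferException that
    // carries the partially built message via setUnfinishedMessage.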

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationAttemptReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
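
  // Illustrative usage sketch for the generated class above (not generated
  // output; the host name, port, and timestamp values are hypothetical, and
  // only the builder/parse calls themselves are part of the real API):
  //
  //   ApplicationAttemptReportProto report =
  //       ApplicationAttemptReportProto.newBuilder()
  //           .setHost("rm-host.example.com")   // optional string host = 2
  //           .setRpcPort(8030)                 // optional int32 rpc_port = 3
  //           .setStartTime(1700000000000L)     // optional int64 startTime = 9
  //           .build();
  //   byte[] bytes = report.toByteArray();
  //   ApplicationAttemptReportProto parsed =
  //       ApplicationAttemptReportProto.parseFrom(bytes);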

  public interface NodeIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string host = 1;</code>
     * @return Whether the host field is set.
     */
    boolean hasHost();
    /**
     * <code>optional string host = 1;</code>
     * @return The host.
     */
    java.lang.String getHost();
    /**
     * <code>optional string host = 1;</code>
     * @return The bytes for host.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes();

    /**
     * <code>optional int32 port = 2;</code>
     * @return Whether the port field is set.
     */
    boolean hasPort();
    /**
     * <code>optional int32 port = 2;</code>
     * @return The port.
     */
    int getPort();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeIdProto}
   */
  public static final class NodeIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeIdProto)
      NodeIdProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeIdProto.newBuilder() to construct.
    private NodeIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeIdProto() {
      host_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeIdProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int HOST_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object host_ = "";
    /**
     * <code>optional string host = 1;</code>
     * @return Whether the host field is set.
     */
    @java.lang.Override
    public boolean hasHost() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string host = 1;</code>
     * @return The host.
     */
    @java.lang.Override
    public java.lang.String getHost() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          host_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string host = 1;</code>
     * @return The bytes for host.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostBytes() {
      java.lang.Object ref = host_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        host_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int PORT_FIELD_NUMBER = 2;
    private int port_ = 0;
    /**
     * <code>optional int32 port = 2;</code>
     * @return Whether the port field is set.
     */
    @java.lang.Override
    public boolean hasPort() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 port = 2;</code>
     * @return The port.
     */
    @java.lang.Override
    public int getPort() {
      return port_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, host_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, port_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, host_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, port_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto) obj;

      if (hasHost() != other.hasHost()) return false;
      if (hasHost()) {
        if (!getHost()
            .equals(other.getHost())) return false;
      }
      if (hasPort() != other.hasPort()) return false;
      if (hasPort()) {
        if (getPort()
            != other.getPort()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasHost()) {
        hash = (37 * hash) + HOST_FIELD_NUMBER;
        hash = (53 * hash) + getHost().hashCode();
      }
      if (hasPort()) {
        hash = (37 * hash) + PORT_FIELD_NUMBER;
        hash = (53 * hash) + getPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
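
    // The parseFrom(...) overloads above are the generated deserialization
    // entry points: the ByteBuffer/ByteString/byte[] variants expect exactly
    // one serialized message, while the parseDelimitedFrom variants first
    // read a varint length prefix so several messages can share one stream.
    // A minimal round-trip sketch (host/port values are hypothetical):
    //
    //   NodeIdProto node = NodeIdProto.newBuilder()
    //       .setHost("nm-host.example.com")
    //       .setPort(8041)
    //       .build();
    //   NodeIdProto parsed = NodeIdProto.parseFrom(node.toByteArray());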

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Builder for protobuf type {@code hadoop.yarn.NodeIdProto}.
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        host_ = "";
        port_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.host_ = host_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.port_ = port_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
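
      // buildPartial0 copies a field from the builder only when its presence
      // bit is set in bitField0_ (bit 0 = host, bit 1 = port), then ORs the
      // same bits into the message so hasHost()/hasPort() report exactly the
      // fields that were populated.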

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) return this;
        if (other.hasHost()) {
          host_ = other.host_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasPort()) {
          setPort(other.getPort());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                host_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                port_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
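
      // The tag switch above follows the protobuf wire format: a tag is
      // (field_number << 3) | wire_type, so tag 10 is field 1 (host) as a
      // length-delimited value and tag 16 is field 2 (port) as a varint.
      // Tag 0 signals end of input, and unrecognized tags are retained as
      // unknown fields rather than dropped.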
      private int bitField0_;

      private java.lang.Object host_ = "";
      /**
       * <code>optional string host = 1;</code>
       * @return Whether the host field is set.
       */
      public boolean hasHost() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string host = 1;</code>
       * @return The host.
       */
      public java.lang.String getHost() {
        java.lang.Object ref = host_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            host_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string host = 1;</code>
       * @return The bytes for host.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostBytes() {
        java.lang.Object ref = host_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          host_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
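
      // host_ holds either a String or a ByteString. getHost() decodes a
      // ByteString to UTF-8, caching the String only when the bytes are valid
      // UTF-8, and getHostBytes() caches the encoded form in the opposite
      // direction; this lazy conversion avoids decoding values that are only
      // ever re-serialized.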
      /**
       * <code>optional string host = 1;</code>
       * @param value The host to set.
       * @return This builder for chaining.
       */
      public Builder setHost(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearHost() {
        host_ = getDefaultInstance().getHost();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string host = 1;</code>
       * @param value The bytes for host to set.
       * @return This builder for chaining.
       */
      public Builder setHostBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        host_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private int port_ ;
      /**
       * <code>optional int32 port = 2;</code>
       * @return Whether the port field is set.
       */
      @java.lang.Override
      public boolean hasPort() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 port = 2;</code>
       * @return The port.
       */
      @java.lang.Override
      public int getPort() {
        return port_;
      }
      /**
       * <code>optional int32 port = 2;</code>
       * @param value The port to set.
       * @return This builder for chaining.
       */
      public Builder setPort(int value) {
        port_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 port = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearPort() {
        bitField0_ = (bitField0_ & ~0x00000002);
        port_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeIdProto>() {
      @java.lang.Override
      public NodeIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
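
    // PARSER is deprecated only to steer callers away from the raw field;
    // parser() and getParserForType() below are the supported accessors.
    // parsePartialFrom attaches the partially-built message to any parse
    // exception so callers can inspect what was read before the failure.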

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeReportProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeReportProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>optional string httpAddress = 2;</code>
     * @return Whether the httpAddress field is set.
     */
    boolean hasHttpAddress();
    /**
     * <code>optional string httpAddress = 2;</code>
     * @return The httpAddress.
     */
    java.lang.String getHttpAddress();
    /**
     * <code>optional string httpAddress = 2;</code>
     * @return The bytes for httpAddress.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHttpAddressBytes();

    /**
     * <code>optional string rackName = 3;</code>
     * @return Whether the rackName field is set.
     */
    boolean hasRackName();
    /**
     * <code>optional string rackName = 3;</code>
     * @return The rackName.
     */
    java.lang.String getRackName();
    /**
     * <code>optional string rackName = 3;</code>
     * @return The bytes for rackName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRackNameBytes();

    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     * @return Whether the used field is set.
     */
    boolean hasUsed();
    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     * @return The used.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed();
    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * <code>optional int32 numContainers = 6;</code>
     * @return Whether the numContainers field is set.
     */
    boolean hasNumContainers();
    /**
     * <code>optional int32 numContainers = 6;</code>
     * @return The numContainers.
     */
    int getNumContainers();

    /**
     * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
     * @return Whether the nodeState field is set.
     */
    boolean hasNodeState();
    /**
     * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
     * @return The nodeState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState();

    /**
     * <code>optional string health_report = 8;</code>
     * @return Whether the healthReport field is set.
     */
    boolean hasHealthReport();
    /**
     * <code>optional string health_report = 8;</code>
     * @return The healthReport.
     */
    java.lang.String getHealthReport();
    /**
     * <code>optional string health_report = 8;</code>
     * @return The bytes for healthReport.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHealthReportBytes();

    /**
     * <code>optional int64 last_health_report_time = 9;</code>
     * @return Whether the lastHealthReportTime field is set.
     */
    boolean hasLastHealthReportTime();
    /**
     * <code>optional int64 last_health_report_time = 9;</code>
     * @return The lastHealthReportTime.
     */
    long getLastHealthReportTime();

    /**
     * <code>repeated string node_labels = 10;</code>
     * @return A list containing the nodeLabels.
     */
    java.util.List<java.lang.String>
        getNodeLabelsList();
    /**
     * <code>repeated string node_labels = 10;</code>
     * @return The count of nodeLabels.
     */
    int getNodeLabelsCount();
    /**
     * <code>repeated string node_labels = 10;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    java.lang.String getNodeLabels(int index);
    /**
     * <code>repeated string node_labels = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index);

    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     * @return Whether the containersUtilization field is set.
     */
    boolean hasContainersUtilization();
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     * @return The containersUtilization.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization();
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     * @return Whether the nodeUtilization field is set.
     */
    boolean hasNodeUtilization();
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     * @return The nodeUtilization.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization();
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder();

    /**
     * <code>optional uint32 decommissioning_timeout = 13;</code>
     * @return Whether the decommissioningTimeout field is set.
     */
    boolean hasDecommissioningTimeout();
    /**
     * <code>optional uint32 decommissioning_timeout = 13;</code>
     * @return The decommissioningTimeout.
     */
    int getDecommissioningTimeout();

    /**
     * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
     * @return Whether the nodeUpdateType field is set.
     */
    boolean hasNodeUpdateType();
    /**
     * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
     * @return The nodeUpdateType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType();

    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> 
        getNodeAttributesList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    int getNodeAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
        getNodeAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
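
  // NodeReportProtoOrBuilder is implemented by both NodeReportProto and its
  // Builder, so callers can read fields uniformly whether they hold a built
  // message or a builder that is still being populated.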
  /**
   * Protobuf type {@code hadoop.yarn.NodeReportProto}
   */
  public static final class NodeReportProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeReportProto)
      NodeReportProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeReportProto.newBuilder() to construct.
    private NodeReportProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeReportProto() {
      httpAddress_ = "";
      rackName_ = "";
      nodeState_ = 1;
      healthReport_ = "";
      nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      nodeUpdateType_ = 0;
      nodeAttributes_ = java.util.Collections.emptyList();
    }
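
    // The no-arg constructor seeds fields with their proto defaults:
    // nodeState_ = 1 matches NS_NEW (the fallback returned by getNodeState()
    // below) and nodeUpdateType_ = 0 matches NODE_USABLE.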

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeReportProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODEID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int HTTPADDRESS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object httpAddress_ = "";
    /**
     * <code>optional string httpAddress = 2;</code>
     * @return Whether the httpAddress field is set.
     */
    @java.lang.Override
    public boolean hasHttpAddress() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string httpAddress = 2;</code>
     * @return The httpAddress.
     */
    @java.lang.Override
    public java.lang.String getHttpAddress() {
      java.lang.Object ref = httpAddress_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          httpAddress_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string httpAddress = 2;</code>
     * @return The bytes for httpAddress.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHttpAddressBytes() {
      java.lang.Object ref = httpAddress_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        httpAddress_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RACKNAME_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object rackName_ = "";
    /**
     * <code>optional string rackName = 3;</code>
     * @return Whether the rackName field is set.
     */
    @java.lang.Override
    public boolean hasRackName() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string rackName = 3;</code>
     * @return The rackName.
     */
    @java.lang.Override
    public java.lang.String getRackName() {
      java.lang.Object ref = rackName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          rackName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string rackName = 3;</code>
     * @return The bytes for rackName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRackNameBytes() {
      java.lang.Object ref = rackName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        rackName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int USED_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto used_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     * @return Whether the used field is set.
     */
    @java.lang.Override
    public boolean hasUsed() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     * @return The used.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed() {
      return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder() {
      return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_;
    }

    public static final int CAPABILITY_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return Whether the capability field is set.
     */
    @java.lang.Override
    public boolean hasCapability() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return The capability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }

    public static final int NUMCONTAINERS_FIELD_NUMBER = 6;
    private int numContainers_ = 0;
    /**
     * <code>optional int32 numContainers = 6;</code>
     * @return Whether the numContainers field is set.
     */
    @java.lang.Override
    public boolean hasNumContainers() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int32 numContainers = 6;</code>
     * @return The numContainers.
     */
    @java.lang.Override
    public int getNumContainers() {
      return numContainers_;
    }

    public static final int NODE_STATE_FIELD_NUMBER = 7;
    private int nodeState_ = 1;
    /**
     * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
     * @return Whether the nodeState field is set.
     */
    @java.lang.Override
    public boolean hasNodeState() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
     * @return The nodeState.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(nodeState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result;
    }

    public static final int HEALTH_REPORT_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private volatile java.lang.Object healthReport_ = "";
    /**
     * <code>optional string health_report = 8;</code>
     * @return Whether the healthReport field is set.
     */
    @java.lang.Override
    public boolean hasHealthReport() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional string health_report = 8;</code>
     * @return The healthReport.
     */
    @java.lang.Override
    public java.lang.String getHealthReport() {
      java.lang.Object ref = healthReport_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          healthReport_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string health_report = 8;</code>
     * @return The bytes for healthReport.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHealthReportBytes() {
      java.lang.Object ref = healthReport_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        healthReport_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LAST_HEALTH_REPORT_TIME_FIELD_NUMBER = 9;
    private long lastHealthReportTime_ = 0L;
    /**
     * <code>optional int64 last_health_report_time = 9;</code>
     * @return Whether the lastHealthReportTime field is set.
     */
    @java.lang.Override
    public boolean hasLastHealthReportTime() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional int64 last_health_report_time = 9;</code>
     * @return The lastHealthReportTime.
     */
    @java.lang.Override
    public long getLastHealthReportTime() {
      return lastHealthReportTime_;
    }

    public static final int NODE_LABELS_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string node_labels = 10;</code>
     * @return A list containing the nodeLabels.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getNodeLabelsList() {
      return nodeLabels_;
    }
    /**
     * <code>repeated string node_labels = 10;</code>
     * @return The count of nodeLabels.
     */
    public int getNodeLabelsCount() {
      return nodeLabels_.size();
    }
    /**
     * <code>repeated string node_labels = 10;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    public java.lang.String getNodeLabels(int index) {
      return nodeLabels_.get(index);
    }
    /**
     * <code>repeated string node_labels = 10;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index) {
      return nodeLabels_.getByteString(index);
    }
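
    // node_labels is backed by a LazyStringArrayList: each element stays in
    // whichever representation it arrived in (String or ByteString) until the
    // other form is requested, and getNodeLabelsList() exposes it as a
    // ProtocolStringList so both views are available without copying.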

    public static final int CONTAINERS_UTILIZATION_FIELD_NUMBER = 11;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_;
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     * @return Whether the containersUtilization field is set.
     */
    @java.lang.Override
    public boolean hasContainersUtilization() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     * @return The containersUtilization.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() {
      return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() {
      return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
    }

    public static final int NODE_UTILIZATION_FIELD_NUMBER = 12;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_;
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     * @return Whether the nodeUtilization field is set.
     */
    @java.lang.Override
    public boolean hasNodeUtilization() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     * @return The nodeUtilization.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() {
      return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() {
      return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
    }

    public static final int DECOMMISSIONING_TIMEOUT_FIELD_NUMBER = 13;
    private int decommissioningTimeout_ = 0;
    /**
     * <code>optional uint32 decommissioning_timeout = 13;</code>
     * @return Whether the decommissioningTimeout field is set.
     */
    @java.lang.Override
    public boolean hasDecommissioningTimeout() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional uint32 decommissioning_timeout = 13;</code>
     * @return The decommissioningTimeout.
     */
    @java.lang.Override
    public int getDecommissioningTimeout() {
      return decommissioningTimeout_;
    }

    public static final int NODE_UPDATE_TYPE_FIELD_NUMBER = 14;
    private int nodeUpdateType_ = 0;
    /**
     * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
     * @return Whether the nodeUpdateType field is set.
     */
    @java.lang.Override
    public boolean hasNodeUpdateType() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
     * @return The nodeUpdateType.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(nodeUpdateType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.NODE_USABLE : result;
    }

    public static final int NODE_ATTRIBUTES_FIELD_NUMBER = 15;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_;
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
        getNodeAttributesOrBuilderList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    @java.lang.Override
    public int getNodeAttributesCount() {
      return nodeAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
      return nodeAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index) {
      return nodeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasUsed()) {
        if (!getUsed().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasCapability()) {
        if (!getCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasContainersUtilization()) {
        if (!getContainersUtilization().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasNodeUtilization()) {
        if (!getNodeUtilization().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getNodeAttributesCount(); i++) {
        if (!getNodeAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
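
    // isInitialized() caches its verdict in memoizedIsInitialized
    // (-1 = not yet computed, 0 = false, 1 = true). NodeReportProto declares
    // no required fields itself, so the checks only recurse into the nested
    // messages (used, capability, the utilization fields, node_attributes)
    // that may carry required fields of their own.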

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, httpAddress_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, rackName_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getUsed());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(5, getCapability());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt32(6, numContainers_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeEnum(7, nodeState_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, healthReport_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeInt64(9, lastHealthReportTime_);
      }
      for (int i = 0; i < nodeLabels_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, nodeLabels_.getRaw(i));
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeMessage(11, getContainersUtilization());
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeMessage(12, getNodeUtilization());
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeUInt32(13, decommissioningTimeout_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        output.writeEnum(14, nodeUpdateType_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        output.writeMessage(15, nodeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
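
    // writeTo emits present fields in field-number order (1..15), keeping
    // the encoding deterministic for a given message, and finishes by
    // re-emitting any unknown fields preserved from parsing.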

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, httpAddress_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, rackName_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getUsed());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getCapability());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(6, numContainers_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(7, nodeState_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, healthReport_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(9, lastHealthReportTime_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < nodeLabels_.size(); i++) {
          dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getNodeLabelsList().size();
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(11, getContainersUtilization());
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(12, getNodeUtilization());
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeUInt32Size(13, decommissioningTimeout_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(14, nodeUpdateType_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(15, nodeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
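
    // getSerializedSize() memoizes its result in memoizedSize (-1 until
    // computed). For node_labels the field-10 tag encodes in a single byte,
    // which is what the "1 * getNodeLabelsList().size()" term accounts for:
    // one tag byte per repeated element on top of the string payloads.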

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto) obj;

      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (hasHttpAddress() != other.hasHttpAddress()) return false;
      if (hasHttpAddress()) {
        if (!getHttpAddress()
            .equals(other.getHttpAddress())) return false;
      }
      if (hasRackName() != other.hasRackName()) return false;
      if (hasRackName()) {
        if (!getRackName()
            .equals(other.getRackName())) return false;
      }
      if (hasUsed() != other.hasUsed()) return false;
      if (hasUsed()) {
        if (!getUsed()
            .equals(other.getUsed())) return false;
      }
      if (hasCapability() != other.hasCapability()) return false;
      if (hasCapability()) {
        if (!getCapability()
            .equals(other.getCapability())) return false;
      }
      if (hasNumContainers() != other.hasNumContainers()) return false;
      if (hasNumContainers()) {
        if (getNumContainers()
            != other.getNumContainers()) return false;
      }
      if (hasNodeState() != other.hasNodeState()) return false;
      if (hasNodeState()) {
        if (nodeState_ != other.nodeState_) return false;
      }
      if (hasHealthReport() != other.hasHealthReport()) return false;
      if (hasHealthReport()) {
        if (!getHealthReport()
            .equals(other.getHealthReport())) return false;
      }
      if (hasLastHealthReportTime() != other.hasLastHealthReportTime()) return false;
      if (hasLastHealthReportTime()) {
        if (getLastHealthReportTime()
            != other.getLastHealthReportTime()) return false;
      }
      if (!getNodeLabelsList()
          .equals(other.getNodeLabelsList())) return false;
      if (hasContainersUtilization() != other.hasContainersUtilization()) return false;
      if (hasContainersUtilization()) {
        if (!getContainersUtilization()
            .equals(other.getContainersUtilization())) return false;
      }
      if (hasNodeUtilization() != other.hasNodeUtilization()) return false;
      if (hasNodeUtilization()) {
        if (!getNodeUtilization()
            .equals(other.getNodeUtilization())) return false;
      }
      if (hasDecommissioningTimeout() != other.hasDecommissioningTimeout()) return false;
      if (hasDecommissioningTimeout()) {
        if (getDecommissioningTimeout()
            != other.getDecommissioningTimeout()) return false;
      }
      if (hasNodeUpdateType() != other.hasNodeUpdateType()) return false;
      if (hasNodeUpdateType()) {
        if (nodeUpdateType_ != other.nodeUpdateType_) return false;
      }
      if (!getNodeAttributesList()
          .equals(other.getNodeAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeId()) {
        hash = (37 * hash) + NODEID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (hasHttpAddress()) {
        hash = (37 * hash) + HTTPADDRESS_FIELD_NUMBER;
        hash = (53 * hash) + getHttpAddress().hashCode();
      }
      if (hasRackName()) {
        hash = (37 * hash) + RACKNAME_FIELD_NUMBER;
        hash = (53 * hash) + getRackName().hashCode();
      }
      if (hasUsed()) {
        hash = (37 * hash) + USED_FIELD_NUMBER;
        hash = (53 * hash) + getUsed().hashCode();
      }
      if (hasCapability()) {
        hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getCapability().hashCode();
      }
      if (hasNumContainers()) {
        hash = (37 * hash) + NUMCONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumContainers();
      }
      if (hasNodeState()) {
        hash = (37 * hash) + NODE_STATE_FIELD_NUMBER;
        hash = (53 * hash) + nodeState_;
      }
      if (hasHealthReport()) {
        hash = (37 * hash) + HEALTH_REPORT_FIELD_NUMBER;
        hash = (53 * hash) + getHealthReport().hashCode();
      }
      if (hasLastHealthReportTime()) {
        hash = (37 * hash) + LAST_HEALTH_REPORT_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getLastHealthReportTime());
      }
      if (getNodeLabelsCount() > 0) {
        hash = (37 * hash) + NODE_LABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelsList().hashCode();
      }
      if (hasContainersUtilization()) {
        hash = (37 * hash) + CONTAINERS_UTILIZATION_FIELD_NUMBER;
        hash = (53 * hash) + getContainersUtilization().hashCode();
      }
      if (hasNodeUtilization()) {
        hash = (37 * hash) + NODE_UTILIZATION_FIELD_NUMBER;
        hash = (53 * hash) + getNodeUtilization().hashCode();
      }
      if (hasDecommissioningTimeout()) {
        hash = (37 * hash) + DECOMMISSIONING_TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + getDecommissioningTimeout();
      }
      if (hasNodeUpdateType()) {
        hash = (37 * hash) + NODE_UPDATE_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + nodeUpdateType_;
      }
      if (getNodeAttributesCount() > 0) {
        hash = (37 * hash) + NODE_ATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getNodeAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
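
    // hashCode() folds each present field in as (37 * hash) + FIELD_NUMBER
    // then (53 * hash) + valueHash, seeded from the descriptor and finished
    // with the unknown fields; the result is memoized because messages are
    // immutable once built.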

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
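
    // These parseFrom/parseDelimitedFrom overloads mirror the NodeIdProto
    // ones above. A round-trip sketch, assuming the standard generated
    // setters on the Builder that follows (all values hypothetical):
    //
    //   NodeReportProto report = NodeReportProto.newBuilder()
    //       .setNodeId(NodeIdProto.newBuilder()
    //           .setHost("nm-host.example.com").setPort(8041))
    //       .setNumContainers(3)
    //       .build();
    //   NodeReportProto parsed = NodeReportProto.parseFrom(report.toByteArray());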

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
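
    // Builder entry points, sketched (illustrative; `report` stands for an
    // existing NodeReportProto): newBuilder() starts from the default
    // instance, while newBuilder(prototype) and toBuilder() pre-populate the
    // new builder by merging an existing message into it.
    //
    //   org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto updated =
    //       report.toBuilder().setHealthReport("healthy").build();
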
    /**
     * Protobuf type {@code hadoop.yarn.NodeReportProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeReportProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeIdFieldBuilder();
          getUsedFieldBuilder();
          getCapabilityFieldBuilder();
          getContainersUtilizationFieldBuilder();
          getNodeUtilizationFieldBuilder();
          getNodeAttributesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        httpAddress_ = "";
        rackName_ = "";
        used_ = null;
        if (usedBuilder_ != null) {
          usedBuilder_.dispose();
          usedBuilder_ = null;
        }
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        numContainers_ = 0;
        nodeState_ = 1;
        healthReport_ = "";
        lastHealthReportTime_ = 0L;
        nodeLabels_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        containersUtilization_ = null;
        if (containersUtilizationBuilder_ != null) {
          containersUtilizationBuilder_.dispose();
          containersUtilizationBuilder_ = null;
        }
        nodeUtilization_ = null;
        if (nodeUtilizationBuilder_ != null) {
          nodeUtilizationBuilder_.dispose();
          nodeUtilizationBuilder_ = null;
        }
        decommissioningTimeout_ = 0;
        nodeUpdateType_ = 0;
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
        } else {
          nodeAttributes_ = null;
          nodeAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00004000);
        return this;
      }
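
      // clear() above resets every field to its proto default (null message,
      // "" string, zero numerics, empty lists), disposes any nested field
      // builders, and zeroes the bitField0_ presence bits, so a single
      // Builder can be reused across build() calls instead of reallocating.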

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeReportProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result) {
        if (nodeAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00004000) != 0)) {
            nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_);
            bitField0_ = (bitField0_ & ~0x00004000);
          }
          result.nodeAttributes_ = nodeAttributes_;
        } else {
          result.nodeAttributes_ = nodeAttributesBuilder_.build();
        }
      }
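
      // buildPartialRepeatedFields() hands the builder's nodeAttributes_ list
      // to the result wrapped as unmodifiable and clears its mutability bit
      // (0x4000); the next mutating call on this builder therefore copies the
      // list first (copy-on-write) instead of aliasing the built message.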

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.httpAddress_ = httpAddress_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.rackName_ = rackName_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.used_ = usedBuilder_ == null
              ? used_
              : usedBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.capability_ = capabilityBuilder_ == null
              ? capability_
              : capabilityBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.numContainers_ = numContainers_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.nodeState_ = nodeState_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.healthReport_ = healthReport_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.lastHealthReportTime_ = lastHealthReportTime_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          nodeLabels_.makeImmutable();
          result.nodeLabels_ = nodeLabels_;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.containersUtilization_ = containersUtilizationBuilder_ == null
              ? containersUtilization_
              : containersUtilizationBuilder_.build();
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.nodeUtilization_ = nodeUtilizationBuilder_ == null
              ? nodeUtilization_
              : nodeUtilizationBuilder_.build();
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.decommissioningTimeout_ = decommissioningTimeout_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00002000) != 0)) {
          result.nodeUpdateType_ = nodeUpdateType_;
          to_bitField0_ |= 0x00001000;
        }
        result.bitField0_ |= to_bitField0_;
      }
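
      // Note the deliberate skew between from_bitField0_ and to_bitField0_
      // above: the builder reserves bit 0x200 for the repeated nodeLabels
      // field, but repeated fields carry no presence bit in the built
      // message, so each later singular field (containersUtilization onward)
      // lands one bit position lower in result.bitField0_ (0x400 -> 0x200,
      // 0x800 -> 0x400, and so on).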

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto.getDefaultInstance()) return this;
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (other.hasHttpAddress()) {
          httpAddress_ = other.httpAddress_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasRackName()) {
          rackName_ = other.rackName_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasUsed()) {
          mergeUsed(other.getUsed());
        }
        if (other.hasCapability()) {
          mergeCapability(other.getCapability());
        }
        if (other.hasNumContainers()) {
          setNumContainers(other.getNumContainers());
        }
        if (other.hasNodeState()) {
          setNodeState(other.getNodeState());
        }
        if (other.hasHealthReport()) {
          healthReport_ = other.healthReport_;
          bitField0_ |= 0x00000080;
          onChanged();
        }
        if (other.hasLastHealthReportTime()) {
          setLastHealthReportTime(other.getLastHealthReportTime());
        }
        if (!other.nodeLabels_.isEmpty()) {
          if (nodeLabels_.isEmpty()) {
            nodeLabels_ = other.nodeLabels_;
            bitField0_ |= 0x00000200;
          } else {
            ensureNodeLabelsIsMutable();
            nodeLabels_.addAll(other.nodeLabels_);
          }
          onChanged();
        }
        if (other.hasContainersUtilization()) {
          mergeContainersUtilization(other.getContainersUtilization());
        }
        if (other.hasNodeUtilization()) {
          mergeNodeUtilization(other.getNodeUtilization());
        }
        if (other.hasDecommissioningTimeout()) {
          setDecommissioningTimeout(other.getDecommissioningTimeout());
        }
        if (other.hasNodeUpdateType()) {
          setNodeUpdateType(other.getNodeUpdateType());
        }
        if (nodeAttributesBuilder_ == null) {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributes_.isEmpty()) {
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00004000);
            } else {
              ensureNodeAttributesIsMutable();
              nodeAttributes_.addAll(other.nodeAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributesBuilder_.isEmpty()) {
              nodeAttributesBuilder_.dispose();
              nodeAttributesBuilder_ = null;
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00004000);
              nodeAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeAttributesFieldBuilder() : null;
            } else {
              nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
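
      // Merge semantics, as implemented above: singular scalars and strings
      // set on `other` overwrite this builder's values, singular submessages
      // are merged recursively (mergeNodeId, mergeUsed, ...), and repeated
      // fields (nodeLabels, nodeAttributes) are concatenated. Sketch, with
      // `base` and `patch` as placeholder messages:
      //
      //   NodeReportProto merged = NodeReportProto.newBuilder(base)
      //       .mergeFrom(patch) // patch wins on overlapping singular fields
      //       .build();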

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasUsed()) {
          if (!getUsed().isInitialized()) {
            return false;
          }
        }
        if (hasCapability()) {
          if (!getCapability().isInitialized()) {
            return false;
          }
        }
        if (hasContainersUtilization()) {
          if (!getContainersUtilization().isInitialized()) {
            return false;
          }
        }
        if (hasNodeUtilization()) {
          if (!getNodeUtilization().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getNodeAttributesCount(); i++) {
          if (!getNodeAttributes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }
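
      // isInitialized() only needs to recurse into message-typed fields:
      // NodeReportProto declares no required fields of its own, and protoc
      // emits these checks where a field's message type (transitively)
      // declares required fields, since an uninitialized submessage would
      // make the built message invalid.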

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                httpAddress_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                rackName_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getUsedFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                numContainers_ = input.readInt32();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(7, tmpRaw);
                } else {
                  nodeState_ = tmpRaw;
                  bitField0_ |= 0x00000040;
                }
                break;
              } // case 56
              case 66: {
                healthReport_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 72: {
                lastHealthReportTime_ = input.readInt64();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 82: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureNodeLabelsIsMutable();
                nodeLabels_.add(bs);
                break;
              } // case 82
              case 90: {
                input.readMessage(
                    getContainersUtilizationFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000400;
                break;
              } // case 90
              case 98: {
                input.readMessage(
                    getNodeUtilizationFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000800;
                break;
              } // case 98
              case 104: {
                decommissioningTimeout_ = input.readUInt32();
                bitField0_ |= 0x00001000;
                break;
              } // case 104
              case 112: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(14, tmpRaw);
                } else {
                  nodeUpdateType_ = tmpRaw;
                  bitField0_ |= 0x00002000;
                }
                break;
              } // case 112
              case 122: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.PARSER,
                        extensionRegistry);
                if (nodeAttributesBuilder_ == null) {
                  ensureNodeAttributesIsMutable();
                  nodeAttributes_.add(m);
                } else {
                  nodeAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 122
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
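
      // Each case label above is a precomputed wire tag:
      // tag = (field_number << 3) | wire_type. For example, case 10 is
      // field 1 (nodeId) with wire type 2 (length-delimited), case 48 is
      // field 6 (numContainers) with wire type 0 (varint), and case 122 is
      // field 15 (nodeAttributes) with wire type 2. Tag 0 signals end of
      // input, and unrecognized enum numbers (cases 56 and 112) are kept as
      // unknown varint fields rather than dropped.
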
      // Presence bits, one per field in declaration order: 0x1 nodeId through
      // 0x2000 nodeUpdateType (0x200, for the repeated nodeLabels, doubles as
      // its mutability flag); 0x4000 tracks whether nodeAttributes_ is
      // currently mutable rather than field presence.
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       *
       * <p>Merges {@code value} field-by-field into any nodeId already set on
       * this builder; if nodeId is unset (or still the default instance), the
       * given value simply replaces it.
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }
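
      // Lazy nested-builder pattern, repeated for every singular message
      // field in this class (nodeId, used, capability, containersUtilization,
      // nodeUtilization): the value lives in the plain field until a builder
      // is first requested, at which point getNodeIdFieldBuilder() seeds a
      // SingleFieldBuilderV3 with the current value, nulls the plain field,
      // and routes all later reads and writes through the builder, so edits
      // made via getNodeIdBuilder() show up in the eventual build().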

      private java.lang.Object httpAddress_ = "";
      /**
       * <code>optional string httpAddress = 2;</code>
       * @return Whether the httpAddress field is set.
       */
      public boolean hasHttpAddress() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string httpAddress = 2;</code>
       * @return The httpAddress.
       */
      public java.lang.String getHttpAddress() {
        java.lang.Object ref = httpAddress_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            httpAddress_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string httpAddress = 2;</code>
       * @return The bytes for httpAddress.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHttpAddressBytes() {
        java.lang.Object ref = httpAddress_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          httpAddress_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string httpAddress = 2;</code>
       * @param value The httpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setHttpAddress(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        httpAddress_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string httpAddress = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearHttpAddress() {
        httpAddress_ = getDefaultInstance().getHttpAddress();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string httpAddress = 2;</code>
       * @param value The bytes for httpAddress to set.
       * @return This builder for chaining.
       */
      public Builder setHttpAddressBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        httpAddress_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
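
      // httpAddress_ (like the other string fields) is typed Object so it can
      // hold either a decoded String or the raw ByteString off the wire:
      // getHttpAddress() decodes lazily and caches the String only when the
      // bytes are valid UTF-8, while getHttpAddressBytes() caches the encoded
      // form, keeping repeated reads in either representation cheap.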

      private java.lang.Object rackName_ = "";
      /**
       * <code>optional string rackName = 3;</code>
       * @return Whether the rackName field is set.
       */
      public boolean hasRackName() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string rackName = 3;</code>
       * @return The rackName.
       */
      public java.lang.String getRackName() {
        java.lang.Object ref = rackName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            rackName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string rackName = 3;</code>
       * @return The bytes for rackName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRackNameBytes() {
        java.lang.Object ref = rackName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rackName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string rackName = 3;</code>
       * @param value The rackName to set.
       * @return This builder for chaining.
       */
      public Builder setRackName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        rackName_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string rackName = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearRackName() {
        rackName_ = getDefaultInstance().getRackName();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string rackName = 3;</code>
       * @param value The bytes for rackName to set.
       * @return This builder for chaining.
       */
      public Builder setRackNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        rackName_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto used_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> usedBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       * @return Whether the used field is set.
       */
      public boolean hasUsed() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       * @return The used.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getUsed() {
        if (usedBuilder_ == null) {
          return used_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_;
        } else {
          return usedBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public Builder setUsed(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (usedBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          used_ = value;
        } else {
          usedBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public Builder setUsed(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (usedBuilder_ == null) {
          used_ = builderForValue.build();
        } else {
          usedBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public Builder mergeUsed(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (usedBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            used_ != null &&
            used_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getUsedBuilder().mergeFrom(value);
          } else {
            used_ = value;
          }
        } else {
          usedBuilder_.mergeFrom(value);
        }
        if (used_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public Builder clearUsed() {
        bitField0_ = (bitField0_ & ~0x00000008);
        used_ = null;
        if (usedBuilder_ != null) {
          usedBuilder_.dispose();
          usedBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getUsedBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getUsedFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getUsedOrBuilder() {
        if (usedBuilder_ != null) {
          return usedBuilder_.getMessageOrBuilder();
        } else {
          return used_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : used_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto used = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getUsedFieldBuilder() {
        if (usedBuilder_ == null) {
          usedBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getUsed(),
                  getParentForChildren(),
                  isClean());
          used_ = null;
        }
        return usedBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       * @return Whether the capability field is set.
       */
      public boolean hasCapability() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       * @return The capability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
        if (capabilityBuilder_ == null) {
          return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        } else {
          return capabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          capability_ = value;
        } else {
          capabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder setCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (capabilityBuilder_ == null) {
          capability_ = builderForValue.build();
        } else {
          capabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            capability_ != null &&
            capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getCapabilityBuilder().mergeFrom(value);
          } else {
            capability_ = value;
          }
        } else {
          capabilityBuilder_.mergeFrom(value);
        }
        if (capability_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder clearCapability() {
        bitField0_ = (bitField0_ & ~0x00000010);
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
        if (capabilityBuilder_ != null) {
          return capabilityBuilder_.getMessageOrBuilder();
        } else {
          return capability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getCapabilityFieldBuilder() {
        if (capabilityBuilder_ == null) {
          capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getCapability(),
                  getParentForChildren(),
                  isClean());
          capability_ = null;
        }
        return capabilityBuilder_;
      }

      private int numContainers_;
      /**
       * <code>optional int32 numContainers = 6;</code>
       * @return Whether the numContainers field is set.
       */
      @java.lang.Override
      public boolean hasNumContainers() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int32 numContainers = 6;</code>
       * @return The numContainers.
       */
      @java.lang.Override
      public int getNumContainers() {
        return numContainers_;
      }
      /**
       * <code>optional int32 numContainers = 6;</code>
       * @param value The numContainers to set.
       * @return This builder for chaining.
       */
      public Builder setNumContainers(int value) {
        numContainers_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 numContainers = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumContainers() {
        bitField0_ = (bitField0_ & ~0x00000020);
        numContainers_ = 0;
        onChanged();
        return this;
      }

      private int nodeState_ = 1;
      /**
       * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
       * @return Whether the nodeState field is set.
       */
      @java.lang.Override public boolean hasNodeState() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
       * @return The nodeState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto getNodeState() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.forNumber(nodeState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto.NS_NEW : result;
      }
      /**
       * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
       * @param value The nodeState to set.
       * @return This builder for chaining.
       */
      public Builder setNodeState(org.apache.hadoop.yarn.proto.YarnProtos.NodeStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000040;
        nodeState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeStateProto node_state = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeState() {
        bitField0_ = (bitField0_ & ~0x00000040);
        nodeState_ = 1;
        onChanged();
        return this;
      }
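
      // nodeState_ is held as the raw enum number (default 1, which
      // getNodeState() reports as NS_NEW, the field default). The forNumber()
      // fallback guards against numbers this build does not know, although
      // such values normally never reach the field: the parse loop above
      // already diverts unrecognized numbers into unknown fields.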

      private java.lang.Object healthReport_ = "";
      /**
       * <code>optional string health_report = 8;</code>
       * @return Whether the healthReport field is set.
       */
      public boolean hasHealthReport() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional string health_report = 8;</code>
       * @return The healthReport.
       */
      public java.lang.String getHealthReport() {
        java.lang.Object ref = healthReport_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            healthReport_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string health_report = 8;</code>
       * @return The bytes for healthReport.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHealthReportBytes() {
        java.lang.Object ref = healthReport_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          healthReport_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string health_report = 8;</code>
       * @param value The healthReport to set.
       * @return This builder for chaining.
       */
      public Builder setHealthReport(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        healthReport_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional string health_report = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearHealthReport() {
        healthReport_ = getDefaultInstance().getHealthReport();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>optional string health_report = 8;</code>
       * @param value The bytes for healthReport to set.
       * @return This builder for chaining.
       */
      public Builder setHealthReportBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        healthReport_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }

      private long lastHealthReportTime_;
      /**
       * <code>optional int64 last_health_report_time = 9;</code>
       * @return Whether the lastHealthReportTime field is set.
       */
      @java.lang.Override
      public boolean hasLastHealthReportTime() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int64 last_health_report_time = 9;</code>
       * @return The lastHealthReportTime.
       */
      @java.lang.Override
      public long getLastHealthReportTime() {
        return lastHealthReportTime_;
      }
      /**
       * <code>optional int64 last_health_report_time = 9;</code>
       * @param value The lastHealthReportTime to set.
       * @return This builder for chaining.
       */
      public Builder setLastHealthReportTime(long value) {
        lastHealthReportTime_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 last_health_report_time = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearLastHealthReportTime() {
        bitField0_ = (bitField0_ & ~0x00000100);
        lastHealthReportTime_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureNodeLabelsIsMutable() {
        if (!nodeLabels_.isModifiable()) {
          nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_);
        }
        bitField0_ |= 0x00000200;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @return A list containing the nodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getNodeLabelsList() {
        nodeLabels_.makeImmutable();
        return nodeLabels_;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @return The count of nodeLabels.
       */
      public int getNodeLabelsCount() {
        return nodeLabels_.size();
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param index The index of the element to return.
       * @return The nodeLabels at the given index.
       */
      public java.lang.String getNodeLabels(int index) {
        return nodeLabels_.get(index);
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param index The index of the value to return.
       * @return The bytes of the nodeLabels at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelsBytes(int index) {
        return nodeLabels_.getByteString(index);
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param index The index to set the value at.
       * @param value The nodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabels(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.set(index, value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param value The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param values The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAllNodeLabels(
          java.lang.Iterable<java.lang.String> values) {
        ensureNodeLabelsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, nodeLabels_);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabels() {
        nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000200);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string node_labels = 10;</code>
       * @param value The bytes of the nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
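
      // nodeLabels_ follows the same copy-on-write discipline as
      // nodeAttributes_: ensureNodeLabelsIsMutable() swaps a frozen
      // LazyStringArrayList for a mutable copy before the first write, and
      // getNodeLabelsList() freezes it again before handing it out.
      // Illustrative use, on any NodeReportProto.Builder `builder`:
      //
      //   builder.addNodeLabels("gpu")
      //          .addAllNodeLabels(java.util.Arrays.asList("ssd", "x86"));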

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto containersUtilization_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> containersUtilizationBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       * @return Whether the containersUtilization field is set.
       */
      public boolean hasContainersUtilization() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       * @return The containersUtilization.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getContainersUtilization() {
        if (containersUtilizationBuilder_ == null) {
          return containersUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
        } else {
          return containersUtilizationBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public Builder setContainersUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (containersUtilizationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containersUtilization_ = value;
        } else {
          containersUtilizationBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public Builder setContainersUtilization(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) {
        if (containersUtilizationBuilder_ == null) {
          containersUtilization_ = builderForValue.build();
        } else {
          containersUtilizationBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public Builder mergeContainersUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (containersUtilizationBuilder_ == null) {
          if (((bitField0_ & 0x00000400) != 0) &&
            containersUtilization_ != null &&
            containersUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) {
            getContainersUtilizationBuilder().mergeFrom(value);
          } else {
            containersUtilization_ = value;
          }
        } else {
          containersUtilizationBuilder_.mergeFrom(value);
        }
        if (containersUtilization_ != null) {
          bitField0_ |= 0x00000400;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public Builder clearContainersUtilization() {
        bitField0_ = (bitField0_ & ~0x00000400);
        containersUtilization_ = null;
        if (containersUtilizationBuilder_ != null) {
          containersUtilizationBuilder_.dispose();
          containersUtilizationBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getContainersUtilizationBuilder() {
        bitField0_ |= 0x00000400;
        onChanged();
        return getContainersUtilizationFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getContainersUtilizationOrBuilder() {
        if (containersUtilizationBuilder_ != null) {
          return containersUtilizationBuilder_.getMessageOrBuilder();
        } else {
          return containersUtilization_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : containersUtilization_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto containers_utilization = 11;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> 
          getContainersUtilizationFieldBuilder() {
        if (containersUtilizationBuilder_ == null) {
          containersUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>(
                  getContainersUtilization(),
                  getParentForChildren(),
                  isClean());
          containersUtilization_ = null;
        }
        return containersUtilizationBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto nodeUtilization_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> nodeUtilizationBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       * @return Whether the nodeUtilization field is set.
       */
      public boolean hasNodeUtilization() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       * @return The nodeUtilization.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto getNodeUtilization() {
        if (nodeUtilizationBuilder_ == null) {
          return nodeUtilization_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
        } else {
          return nodeUtilizationBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public Builder setNodeUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (nodeUtilizationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeUtilization_ = value;
        } else {
          nodeUtilizationBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public Builder setNodeUtilization(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder builderForValue) {
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilization_ = builderForValue.build();
        } else {
          nodeUtilizationBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public Builder mergeNodeUtilization(org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto value) {
        if (nodeUtilizationBuilder_ == null) {
          if (((bitField0_ & 0x00000800) != 0) &&
            nodeUtilization_ != null &&
            nodeUtilization_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance()) {
            getNodeUtilizationBuilder().mergeFrom(value);
          } else {
            nodeUtilization_ = value;
          }
        } else {
          nodeUtilizationBuilder_.mergeFrom(value);
        }
        if (nodeUtilization_ != null) {
          bitField0_ |= 0x00000800;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public Builder clearNodeUtilization() {
        bitField0_ = (bitField0_ & ~0x00000800);
        nodeUtilization_ = null;
        if (nodeUtilizationBuilder_ != null) {
          nodeUtilizationBuilder_.dispose();
          nodeUtilizationBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder getNodeUtilizationBuilder() {
        bitField0_ |= 0x00000800;
        onChanged();
        return getNodeUtilizationFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder getNodeUtilizationOrBuilder() {
        if (nodeUtilizationBuilder_ != null) {
          return nodeUtilizationBuilder_.getMessageOrBuilder();
        } else {
          return nodeUtilization_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.getDefaultInstance() : nodeUtilization_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceUtilizationProto node_utilization = 12;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder> 
          getNodeUtilizationFieldBuilder() {
        if (nodeUtilizationBuilder_ == null) {
          nodeUtilizationBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceUtilizationProtoOrBuilder>(
                  getNodeUtilization(),
                  getParentForChildren(),
                  isClean());
          nodeUtilization_ = null;
        }
        return nodeUtilizationBuilder_;
      }

      private int decommissioningTimeout_ ;
      /**
       * <code>optional uint32 decommissioning_timeout = 13;</code>
       * @return Whether the decommissioningTimeout field is set.
       */
      @java.lang.Override
      public boolean hasDecommissioningTimeout() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional uint32 decommissioning_timeout = 13;</code>
       * @return The decommissioningTimeout.
       */
      @java.lang.Override
      public int getDecommissioningTimeout() {
        return decommissioningTimeout_;
      }
      /**
       * <code>optional uint32 decommissioning_timeout = 13;</code>
       * @param value The decommissioningTimeout to set.
       * @return This builder for chaining.
       */
      public Builder setDecommissioningTimeout(int value) {
        decommissioningTimeout_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * <code>optional uint32 decommissioning_timeout = 13;</code>
       * @return This builder for chaining.
       */
      public Builder clearDecommissioningTimeout() {
        bitField0_ = (bitField0_ & ~0x00001000);
        decommissioningTimeout_ = 0;
        onChanged();
        return this;
      }

      private int nodeUpdateType_ = 0;
      /**
       * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
       * @return Whether the nodeUpdateType field is set.
       */
      @java.lang.Override public boolean hasNodeUpdateType() {
        return ((bitField0_ & 0x00002000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
       * @return The nodeUpdateType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto getNodeUpdateType() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.forNumber(nodeUpdateType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto.NODE_USABLE : result;
      }
      /**
       * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
       * @param value The nodeUpdateType to set.
       * @return This builder for chaining.
       */
      public Builder setNodeUpdateType(org.apache.hadoop.yarn.proto.YarnProtos.NodeUpdateTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00002000;
        nodeUpdateType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeUpdateTypeProto node_update_type = 14;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeUpdateType() {
        bitField0_ = (bitField0_ & ~0x00002000);
        nodeUpdateType_ = 0;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_ =
        java.util.Collections.emptyList();
      private void ensureNodeAttributesIsMutable() {
        if (!((bitField0_ & 0x00004000) != 0)) {
          nodeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto>(nodeAttributes_);
          bitField0_ |= 0x00004000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> nodeAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
        if (nodeAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        } else {
          return nodeAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public int getNodeAttributesCount() {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.size();
        } else {
          return nodeAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder addNodeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder addAllNodeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> values) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeAttributes_);
          onChanged();
        } else {
          nodeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder clearNodeAttributes() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00004000);
          onChanged();
        } else {
          nodeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public Builder removeNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.remove(index);
          onChanged();
        } else {
          nodeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder getNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
          int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
           getNodeAttributesOrBuilderList() {
        if (nodeAttributesBuilder_ != null) {
          return nodeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder() {
        return getNodeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto node_attributes = 15;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder> 
           getNodeAttributesBuilderList() {
        return getNodeAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00004000) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }
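      /*
       * Note on the accessors above: the builder keeps the repeated
       * node_attributes field in one of two representations. Until a nested
       * builder is requested, elements live in the plain nodeAttributes_ list
       * guarded by ensureNodeAttributesIsMutable(); once
       * getNodeAttributesFieldBuilder() runs, ownership moves to the
       * RepeatedFieldBuilderV3 and nodeAttributes_ is nulled out, which is why
       * every accessor first branches on whether nodeAttributesBuilder_ is null.
       */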
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeReportProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeReportProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeReportProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeReportProto>() {
      @java.lang.Override
      public NodeReportProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeReportProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeReportProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
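  /*
   * Usage sketch (illustrative, not generated output): building and
   * round-tripping a NodeReportProto with the builder API above. The field
   * values here are placeholder assumptions, not values taken from YARN.
   *
   *   NodeReportProto report = NodeReportProto.newBuilder()
   *       .setNodeUtilization(ResourceUtilizationProto.getDefaultInstance())
   *       .setDecommissioningTimeout(3600)
   *       .setNodeUpdateType(NodeUpdateTypeProto.NODE_USABLE)
   *       .addNodeAttributes(NodeAttributeProto.getDefaultInstance())
   *       .build();
   *   byte[] bytes = report.toByteArray();
   *   NodeReportProto parsed = NodeReportProto.parseFrom(bytes);
   */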

  public interface NodeIdToLabelsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeIdToLabelsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    boolean hasNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId();
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder();

    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @return A list containing the nodeLabels.
     */
    java.util.List<java.lang.String>
        getNodeLabelsList();
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @return The count of nodeLabels.
     */
    int getNodeLabelsCount();
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    java.lang.String getNodeLabels(int index);
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeIdToLabelsProto}
   */
  public static final class NodeIdToLabelsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeIdToLabelsProto)
      NodeIdToLabelsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeIdToLabelsProto.newBuilder() to construct.
    private NodeIdToLabelsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeIdToLabelsProto() {
      nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeIdToLabelsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODEID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return Whether the nodeId field is set.
     */
    @java.lang.Override
    public boolean hasNodeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     * @return The nodeId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }
    /**
     * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
      return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
    }

    public static final int NODELABELS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @return A list containing the nodeLabels.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getNodeLabelsList() {
      return nodeLabels_;
    }
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @return The count of nodeLabels.
     */
    public int getNodeLabelsCount() {
      return nodeLabels_.size();
    }
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @param index The index of the element to return.
     * @return The nodeLabels at the given index.
     */
    public java.lang.String getNodeLabels(int index) {
      return nodeLabels_.get(index);
    }
    /**
     * <code>repeated string nodeLabels = 2;</code>
     * @param index The index of the value to return.
     * @return The bytes of the nodeLabels at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes(int index) {
      return nodeLabels_.getByteString(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeId());
      }
      for (int i = 0; i < nodeLabels_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, nodeLabels_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeId());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < nodeLabels_.size(); i++) {
          dataSize += computeStringSizeNoTag(nodeLabels_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getNodeLabelsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto) obj;

      if (hasNodeId() != other.hasNodeId()) return false;
      if (hasNodeId()) {
        if (!getNodeId()
            .equals(other.getNodeId())) return false;
      }
      if (!getNodeLabelsList()
          .equals(other.getNodeLabelsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeId()) {
        hash = (37 * hash) + NODEID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeId().hashCode();
      }
      if (getNodeLabelsCount() > 0) {
        hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
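    /*
     * The parseDelimitedFrom overloads above read a varint length prefix
     * before the message body, so they pair with
     * writeDelimitedTo(OutputStream) when several messages share one stream;
     * the plain parseFrom overloads expect the input to hold exactly one
     * message. A minimal sketch (stream names are illustrative):
     *
     *   mapping.writeDelimitedTo(out);                   // producer side
     *   NodeIdToLabelsProto m =
     *       NodeIdToLabelsProto.parseDelimitedFrom(in);  // consumer side
     */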

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeIdToLabelsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeIdToLabelsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        nodeLabels_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeId_ = nodeIdBuilder_ == null
              ? nodeId_
              : nodeIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          nodeLabels_.makeImmutable();
          result.nodeLabels_ = nodeLabels_;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto.getDefaultInstance()) return this;
        if (other.hasNodeId()) {
          mergeNodeId(other.getNodeId());
        }
        if (!other.nodeLabels_.isEmpty()) {
          if (nodeLabels_.isEmpty()) {
            nodeLabels_ = other.nodeLabels_;
            bitField0_ |= 0x00000002;
          } else {
            ensureNodeLabelsIsMutable();
            nodeLabels_.addAll(other.nodeLabels_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getNodeIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureNodeLabelsIsMutable();
                nodeLabels_.add(bs);
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto nodeId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return Whether the nodeId field is set.
       */
      public boolean hasNodeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       * @return The nodeId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId() {
        if (nodeIdBuilder_ == null) {
          return nodeId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        } else {
          return nodeIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeId_ = value;
        } else {
          nodeIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder setNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          nodeId_ = builderForValue.build();
        } else {
          nodeIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder mergeNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            nodeId_ != null &&
            nodeId_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance()) {
            getNodeIdBuilder().mergeFrom(value);
          } else {
            nodeId_ = value;
          }
        } else {
          nodeIdBuilder_.mergeFrom(value);
        }
        if (nodeId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public Builder clearNodeId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeId_ = null;
        if (nodeIdBuilder_ != null) {
          nodeIdBuilder_.dispose();
          nodeIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilder();
        } else {
          return nodeId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance() : nodeId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.NodeIdProto nodeId = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  getNodeId(),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureNodeLabelsIsMutable() {
        if (!nodeLabels_.isModifiable()) {
          nodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(nodeLabels_);
        }
        bitField0_ |= 0x00000002;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @return A list containing the nodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getNodeLabelsList() {
        nodeLabels_.makeImmutable();
        return nodeLabels_;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @return The count of nodeLabels.
       */
      public int getNodeLabelsCount() {
        return nodeLabels_.size();
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param index The index of the element to return.
       * @return The nodeLabels at the given index.
       */
      public java.lang.String getNodeLabels(int index) {
        return nodeLabels_.get(index);
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param index The index of the value to return.
       * @return The bytes of the nodeLabels at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelsBytes(int index) {
        return nodeLabels_.getByteString(index);
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param index The index to set the value at.
       * @param value The nodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabels(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.set(index, value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param value The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param values The nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAllNodeLabels(
          java.lang.Iterable<java.lang.String> values) {
        ensureNodeLabelsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, nodeLabels_);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabels() {
        nodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string nodeLabels = 2;</code>
       * @param value The bytes of the nodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureNodeLabelsIsMutable();
        nodeLabels_.add(value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeIdToLabelsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeIdToLabelsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeIdToLabelsProto>() {
      @java.lang.Override
      public NodeIdToLabelsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeIdToLabelsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdToLabelsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
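  /*
   * Usage sketch (illustrative, not generated output): attaching labels to a
   * node id and round-tripping the message. The "gpu" label and the default
   * NodeIdProto instance are placeholder values.
   *
   *   NodeIdToLabelsProto mapping = NodeIdToLabelsProto.newBuilder()
   *       .setNodeId(NodeIdProto.getDefaultInstance())
   *       .addNodeLabels("gpu")
   *       .build();
   *   NodeIdToLabelsProto parsed =
   *       NodeIdToLabelsProto.parseFrom(mapping.toByteArray());
   */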

  public interface LabelsToNodeIdsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LabelsToNodeIdsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return Whether the nodeLabels field is set.
     */
    boolean hasNodeLabels();
    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return The nodeLabels.
     */
    java.lang.String getNodeLabels();
    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return The bytes for nodeLabels.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes();

    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> 
        getNodeIdList();
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    int getNodeIdCount();
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
        getNodeIdOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.LabelsToNodeIdsProto}
   */
  public static final class LabelsToNodeIdsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LabelsToNodeIdsProto)
      LabelsToNodeIdsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LabelsToNodeIdsProto.newBuilder() to construct.
    private LabelsToNodeIdsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LabelsToNodeIdsProto() {
      nodeLabels_ = "";
      nodeId_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LabelsToNodeIdsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODELABELS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object nodeLabels_ = "";
    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return Whether the nodeLabels field is set.
     */
    @java.lang.Override
    public boolean hasNodeLabels() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return The nodeLabels.
     */
    @java.lang.Override
    public java.lang.String getNodeLabels() {
      java.lang.Object ref = nodeLabels_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeLabels_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string nodeLabels = 1;</code>
     * @return The bytes for nodeLabels.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelsBytes() {
      java.lang.Object ref = nodeLabels_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeLabels_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int NODEID_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> nodeId_;
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> getNodeIdList() {
      return nodeId_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
        getNodeIdOrBuilderList() {
      return nodeId_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public int getNodeIdCount() {
      return nodeId_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index) {
      return nodeId_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(
        int index) {
      return nodeId_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, nodeLabels_);
      }
      for (int i = 0; i < nodeId_.size(); i++) {
        output.writeMessage(2, nodeId_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, nodeLabels_);
      }
      for (int i = 0; i < nodeId_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, nodeId_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto) obj;

      if (hasNodeLabels() != other.hasNodeLabels()) return false;
      if (hasNodeLabels()) {
        if (!getNodeLabels()
            .equals(other.getNodeLabels())) return false;
      }
      if (!getNodeIdList()
          .equals(other.getNodeIdList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeLabels()) {
        hash = (37 * hash) + NODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabels().hashCode();
      }
      if (getNodeIdCount() > 0) {
        hash = (37 * hash) + NODEID_FIELD_NUMBER;
        hash = (53 * hash) + getNodeIdList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LabelsToNodeIdsProto}
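     *
     * <p>A hedged sketch of editing the repeated nodeId field (editorial
     * addition; every method shown is generated in this builder):
     * <pre>{@code
     * LabelsToNodeIdsProto.Builder b = LabelsToNodeIdsProto.newBuilder();
     * b.addNodeId(NodeIdProto.getDefaultInstance()); // append a message
     * b.addNodeIdBuilder();                          // append via a nested builder
     * b.removeNodeId(0);                             // drop the first entry
     * int remaining = b.getNodeIdCount();            // 1
     * }</pre>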
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LabelsToNodeIdsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeLabels_ = "";
        if (nodeIdBuilder_ == null) {
          nodeId_ = java.util.Collections.emptyList();
        } else {
          nodeId_ = null;
          nodeIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result) {
        if (nodeIdBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            nodeId_ = java.util.Collections.unmodifiableList(nodeId_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.nodeId_ = nodeId_;
        } else {
          result.nodeId_ = nodeIdBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeLabels_ = nodeLabels_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto.getDefaultInstance()) return this;
        if (other.hasNodeLabels()) {
          nodeLabels_ = other.nodeLabels_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (nodeIdBuilder_ == null) {
          if (!other.nodeId_.isEmpty()) {
            if (nodeId_.isEmpty()) {
              nodeId_ = other.nodeId_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureNodeIdIsMutable();
              nodeId_.addAll(other.nodeId_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeId_.isEmpty()) {
            if (nodeIdBuilder_.isEmpty()) {
              nodeIdBuilder_.dispose();
              nodeIdBuilder_ = null;
              nodeId_ = other.nodeId_;
              bitField0_ = (bitField0_ & ~0x00000002);
              nodeIdBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeIdFieldBuilder() : null;
            } else {
              nodeIdBuilder_.addAllMessages(other.nodeId_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                nodeLabels_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.PARSER,
                        extensionRegistry);
                if (nodeIdBuilder_ == null) {
                  ensureNodeIdIsMutable();
                  nodeId_.add(m);
                } else {
                  nodeIdBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object nodeLabels_ = "";
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @return Whether the nodeLabels field is set.
       */
      public boolean hasNodeLabels() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @return The nodeLabels.
       */
      public java.lang.String getNodeLabels() {
        java.lang.Object ref = nodeLabels_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeLabels_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @return The bytes for nodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelsBytes() {
        java.lang.Object ref = nodeLabels_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeLabels_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @param value The nodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabels_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabels() {
        nodeLabels_ = getDefaultInstance().getNodeLabels();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string nodeLabels = 1;</code>
       * @param value The bytes for nodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabels_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> nodeId_ =
        java.util.Collections.emptyList();
      private void ensureNodeIdIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          nodeId_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto>(nodeId_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> nodeIdBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> getNodeIdList() {
        if (nodeIdBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeId_);
        } else {
          return nodeIdBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public int getNodeIdCount() {
        if (nodeIdBuilder_ == null) {
          return nodeId_.size();
        } else {
          return nodeIdBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto getNodeId(int index) {
        if (nodeIdBuilder_ == null) {
          return nodeId_.get(index);
        } else {
          return nodeIdBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder setNodeId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeIdIsMutable();
          nodeId_.set(index, value);
          onChanged();
        } else {
          nodeIdBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder setNodeId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          ensureNodeIdIsMutable();
          nodeId_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeIdBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder addNodeId(org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeIdIsMutable();
          nodeId_.add(value);
          onChanged();
        } else {
          nodeIdBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder addNodeId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto value) {
        if (nodeIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeIdIsMutable();
          nodeId_.add(index, value);
          onChanged();
        } else {
          nodeIdBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder addNodeId(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          ensureNodeIdIsMutable();
          nodeId_.add(builderForValue.build());
          onChanged();
        } else {
          nodeIdBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder addNodeId(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder builderForValue) {
        if (nodeIdBuilder_ == null) {
          ensureNodeIdIsMutable();
          nodeId_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeIdBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder addAllNodeId(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto> values) {
        if (nodeIdBuilder_ == null) {
          ensureNodeIdIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeId_);
          onChanged();
        } else {
          nodeIdBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder clearNodeId() {
        if (nodeIdBuilder_ == null) {
          nodeId_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          nodeIdBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public Builder removeNodeId(int index) {
        if (nodeIdBuilder_ == null) {
          ensureNodeIdIsMutable();
          nodeId_.remove(index);
          onChanged();
        } else {
          nodeIdBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder getNodeIdBuilder(
          int index) {
        return getNodeIdFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder getNodeIdOrBuilder(
          int index) {
        if (nodeIdBuilder_ == null) {
          return nodeId_.get(index);
        } else {
          return nodeIdBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
           getNodeIdOrBuilderList() {
        if (nodeIdBuilder_ != null) {
          return nodeIdBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeId_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addNodeIdBuilder() {
        return getNodeIdFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder addNodeIdBuilder(
          int index) {
        return getNodeIdFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeIdProto nodeId = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder> 
           getNodeIdBuilderList() {
        return getNodeIdFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder> 
          getNodeIdFieldBuilder() {
        if (nodeIdBuilder_ == null) {
          nodeIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProtoOrBuilder>(
                  nodeId_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          nodeId_ = null;
        }
        return nodeIdBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LabelsToNodeIdsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LabelsToNodeIdsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LabelsToNodeIdsProto>() {
      @java.lang.Override
      public LabelsToNodeIdsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LabelsToNodeIdsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LabelsToNodeIdsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeLabelProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeLabelProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string name = 1;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>optional string name = 1;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>optional string name = 1;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * <code>optional bool isExclusive = 2 [default = true];</code>
     * @return Whether the isExclusive field is set.
     */
    boolean hasIsExclusive();
    /**
     * <code>optional bool isExclusive = 2 [default = true];</code>
     * @return The isExclusive.
     */
    boolean getIsExclusive();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeLabelProto}
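   *
   * <p>A hedged construction sketch (editorial addition; the methods are the
   * generated members of this class, and the name "gpu" is an assumed example):
   * <pre>{@code
   * NodeLabelProto label = NodeLabelProto.newBuilder()
   *     .setName("gpu")
   *     .setIsExclusive(false)
   *     .build();
   * // isExclusive declares [default = true]; unset fields report the default:
   * boolean d = NodeLabelProto.getDefaultInstance().getIsExclusive(); // true
   * }</pre>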
   */
  public static final class NodeLabelProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeLabelProto)
      NodeLabelProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeLabelProto.newBuilder() to construct.
    private NodeLabelProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeLabelProto() {
      name_ = "";
      isExclusive_ = true;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeLabelProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>optional string name = 1;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string name = 1;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string name = 1;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int ISEXCLUSIVE_FIELD_NUMBER = 2;
    private boolean isExclusive_ = true;
    /**
     * <code>optional bool isExclusive = 2 [default = true];</code>
     * @return Whether the isExclusive field is set.
     */
    @java.lang.Override
    public boolean hasIsExclusive() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional bool isExclusive = 2 [default = true];</code>
     * @return The isExclusive.
     */
    @java.lang.Override
    public boolean getIsExclusive() {
      return isExclusive_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBool(2, isExclusive_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(2, isExclusive_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto) obj;

      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasIsExclusive() != other.hasIsExclusive()) return false;
      if (hasIsExclusive()) {
        if (getIsExclusive()
            != other.getIsExclusive()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasIsExclusive()) {
        hash = (37 * hash) + ISEXCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIsExclusive());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeLabelProto}
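     *
     * <p>A hedged sketch of clear semantics on this builder (editorial
     * addition): clearing a field with a declared default restores that
     * default and marks the field unset.
     * <pre>{@code
     * NodeLabelProto.Builder b = NodeLabelProto.newBuilder().setIsExclusive(false);
     * b.clearIsExclusive();
     * boolean set = b.hasIsExclusive();   // false
     * boolean value = b.getIsExclusive(); // true, the [default = true] value
     * }</pre>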
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeLabelProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        name_ = "";
        isExclusive_ = true;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeLabelProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.isExclusive_ = isExclusive_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto.getDefaultInstance()) return this;
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasIsExclusive()) {
          setIsExclusive(other.getIsExclusive());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                isExclusive_ = input.readBool();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object name_ = "";
      /**
       * <code>optional string name = 1;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string name = 1;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string name = 1;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string name = 1;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string name = 1;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private boolean isExclusive_ = true;
      /**
       * <code>optional bool isExclusive = 2 [default = true];</code>
       * @return Whether the isExclusive field is set.
       */
      @java.lang.Override
      public boolean hasIsExclusive() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bool isExclusive = 2 [default = true];</code>
       * @return The isExclusive.
       */
      @java.lang.Override
      public boolean getIsExclusive() {
        return isExclusive_;
      }
      /**
       * <code>optional bool isExclusive = 2 [default = true];</code>
       * @param value The isExclusive to set.
       * @return This builder for chaining.
       */
      public Builder setIsExclusive(boolean value) {
        isExclusive_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool isExclusive = 2 [default = true];</code>
       * @return This builder for chaining.
       */
      public Builder clearIsExclusive() {
        bitField0_ = (bitField0_ & ~0x00000002);
        isExclusive_ = true;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeLabelProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeLabelProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeLabelProto>() {
      @java.lang.Override
      public NodeLabelProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeLabelProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeAttributeKeyProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeKeyProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return Whether the attributePrefix field is set.
     */
    boolean hasAttributePrefix();
    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return The attributePrefix.
     */
    java.lang.String getAttributePrefix();
    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return The bytes for attributePrefix.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributePrefixBytes();

    /**
     * <code>required string attributeName = 2;</code>
     * @return Whether the attributeName field is set.
     */
    boolean hasAttributeName();
    /**
     * <code>required string attributeName = 2;</code>
     * @return The attributeName.
     */
    java.lang.String getAttributeName();
    /**
     * <code>required string attributeName = 2;</code>
     * @return The bytes for attributeName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeNameBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeAttributeKeyProto}
   */
  public static final class NodeAttributeKeyProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeKeyProto)
      NodeAttributeKeyProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeAttributeKeyProto.newBuilder() to construct.
    private NodeAttributeKeyProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeAttributeKeyProto() {
      attributePrefix_ = "rm.yarn.io";
      attributeName_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeAttributeKeyProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder.class);
    }

    private int bitField0_;
    public static final int ATTRIBUTEPREFIX_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object attributePrefix_ = "rm.yarn.io";
    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return Whether the attributePrefix field is set.
     */
    @java.lang.Override
    public boolean hasAttributePrefix() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return The attributePrefix.
     */
    @java.lang.Override
    public java.lang.String getAttributePrefix() {
      java.lang.Object ref = attributePrefix_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          attributePrefix_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
     * @return The bytes for attributePrefix.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributePrefixBytes() {
      java.lang.Object ref = attributePrefix_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        attributePrefix_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int ATTRIBUTENAME_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object attributeName_ = "";
    /**
     * <code>required string attributeName = 2;</code>
     * @return Whether the attributeName field is set.
     */
    @java.lang.Override
    public boolean hasAttributeName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required string attributeName = 2;</code>
     * @return The attributeName.
     */
    @java.lang.Override
    public java.lang.String getAttributeName() {
      java.lang.Object ref = attributeName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          attributeName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string attributeName = 2;</code>
     * @return The bytes for attributeName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeNameBytes() {
      java.lang.Object ref = attributeName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        attributeName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
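    // Editorial note on the accessors above: the backing field holds either a
    // String or a ByteString. Each getter converts on demand and, when the
    // bytes are valid UTF-8, caches the converted form back into the volatile
    // field, so repeated calls avoid re-decoding.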

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasAttributeName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, attributePrefix_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, attributeName_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, attributePrefix_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, attributeName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto) obj;

      if (hasAttributePrefix() != other.hasAttributePrefix()) return false;
      if (hasAttributePrefix()) {
        if (!getAttributePrefix()
            .equals(other.getAttributePrefix())) return false;
      }
      if (hasAttributeName() != other.hasAttributeName()) return false;
      if (hasAttributeName()) {
        if (!getAttributeName()
            .equals(other.getAttributeName())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAttributePrefix()) {
        hash = (37 * hash) + ATTRIBUTEPREFIX_FIELD_NUMBER;
        hash = (53 * hash) + getAttributePrefix().hashCode();
      }
      if (hasAttributeName()) {
        hash = (37 * hash) + ATTRIBUTENAME_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeAttributeKeyProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeKeyProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        attributePrefix_ = "rm.yarn.io";
        attributeName_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.attributePrefix_ = attributePrefix_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attributeName_ = attributeName_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) return this;
        if (other.hasAttributePrefix()) {
          attributePrefix_ = other.attributePrefix_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasAttributeName()) {
          attributeName_ = other.attributeName_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasAttributeName()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                attributePrefix_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                attributeName_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
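      // Editorial note on the case labels above: a protobuf tag is
      // (field_number << 3) | wire_type, so case 10 is field 1 (length-delimited)
      // and case 18 is field 2 (length-delimited).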
      private int bitField0_;

      private java.lang.Object attributePrefix_ = "rm.yarn.io";
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @return Whether the attributePrefix field is set.
       */
      public boolean hasAttributePrefix() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @return The attributePrefix.
       */
      public java.lang.String getAttributePrefix() {
        java.lang.Object ref = attributePrefix_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributePrefix_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @return The bytes for attributePrefix.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributePrefixBytes() {
        java.lang.Object ref = attributePrefix_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributePrefix_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @param value The attributePrefix to set.
       * @return This builder for chaining.
       */
      public Builder setAttributePrefix(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributePrefix_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributePrefix() {
        attributePrefix_ = getDefaultInstance().getAttributePrefix();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string attributePrefix = 1 [default = "rm.yarn.io"];</code>
       * @param value The bytes for attributePrefix to set.
       * @return This builder for chaining.
       */
      public Builder setAttributePrefixBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributePrefix_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object attributeName_ = "";
      /**
       * <code>required string attributeName = 2;</code>
       * @return Whether the attributeName field is set.
       */
      public boolean hasAttributeName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required string attributeName = 2;</code>
       * @return The attributeName.
       */
      public java.lang.String getAttributeName() {
        java.lang.Object ref = attributeName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributeName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string attributeName = 2;</code>
       * @return The bytes for attributeName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributeNameBytes() {
        java.lang.Object ref = attributeName_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributeName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string attributeName = 2;</code>
       * @param value The attributeName to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributeName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required string attributeName = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeName() {
        attributeName_ = getDefaultInstance().getAttributeName();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>required string attributeName = 2;</code>
       * @param value The bytes for attributeName to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributeName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeKeyProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeKeyProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributeKeyProto>() {
      @java.lang.Override
      public NodeAttributeKeyProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeKeyProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
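  // Illustrative round-trip (an editorial sketch, not generated output).
  // attributeName is required, so build() throws via
  // newUninitializedMessageException when it is unset:
  //
  //   NodeAttributeKeyProto key = NodeAttributeKeyProto.newBuilder()
  //       .setAttributeName("hostname")   // required field 2
  //       .build();                       // attributePrefix falls back to "rm.yarn.io"
  //   byte[] bytes = key.toByteArray();
  //   NodeAttributeKeyProto parsed = NodeAttributeKeyProto.parseFrom(bytes);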

  public interface NodeAttributeProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return Whether the attributeKey field is set.
     */
    boolean hasAttributeKey();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return The attributeKey.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder();

    /**
     * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
     * @return Whether the attributeType field is set.
     */
    boolean hasAttributeType();
    /**
     * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
     * @return The attributeType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType();

    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return Whether the attributeValue field is set.
     */
    boolean hasAttributeValue();
    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return The attributeValue.
     */
    java.lang.String getAttributeValue();
    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return The bytes for attributeValue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeValueBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeAttributeProto}
   */
  public static final class NodeAttributeProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeProto)
      NodeAttributeProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeAttributeProto.newBuilder() to construct.
    private NodeAttributeProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeAttributeProto() {
      attributeType_ = 1;
      attributeValue_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeAttributeProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder.class);
    }

    private int bitField0_;
    public static final int ATTRIBUTEKEY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_;
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return Whether the attributeKey field is set.
     */
    @java.lang.Override
    public boolean hasAttributeKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return The attributeKey.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() {
      return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() {
      return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
    }

    public static final int ATTRIBUTETYPE_FIELD_NUMBER = 2;
    private int attributeType_ = 1;
    /**
     * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
     * @return Whether the attributeType field is set.
     */
    @java.lang.Override public boolean hasAttributeType() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
     * @return The attributeType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result;
    }

    public static final int ATTRIBUTEVALUE_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object attributeValue_ = "";
    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return Whether the attributeValue field is set.
     */
    @java.lang.Override
    public boolean hasAttributeValue() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return The attributeValue.
     */
    @java.lang.Override
    public java.lang.String getAttributeValue() {
      java.lang.Object ref = attributeValue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          attributeValue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string attributeValue = 3 [default = ""];</code>
     * @return The bytes for attributeValue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeValueBytes() {
      java.lang.Object ref = attributeValue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        attributeValue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasAttributeKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getAttributeKey().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
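    // Editorial note: memoizedIsInitialized uses -1 for "not yet computed",
    // 0 for "missing required fields", and 1 for "fully initialized", so the
    // required-field walk above runs at most once per instance.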

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getAttributeKey());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, attributeType_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, attributeValue_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getAttributeKey());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, attributeType_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, attributeValue_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto) obj;

      if (hasAttributeKey() != other.hasAttributeKey()) return false;
      if (hasAttributeKey()) {
        if (!getAttributeKey()
            .equals(other.getAttributeKey())) return false;
      }
      if (hasAttributeType() != other.hasAttributeType()) return false;
      if (hasAttributeType()) {
        if (attributeType_ != other.attributeType_) return false;
      }
      if (hasAttributeValue() != other.hasAttributeValue()) return false;
      if (hasAttributeValue()) {
        if (!getAttributeValue()
            .equals(other.getAttributeValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAttributeKey()) {
        hash = (37 * hash) + ATTRIBUTEKEY_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeKey().hashCode();
      }
      if (hasAttributeType()) {
        hash = (37 * hash) + ATTRIBUTETYPE_FIELD_NUMBER;
        hash = (53 * hash) + attributeType_;
      }
      if (hasAttributeValue()) {
        hash = (37 * hash) + ATTRIBUTEVALUE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeAttributeProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getAttributeKeyFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        attributeKey_ = null;
        if (attributeKeyBuilder_ != null) {
          attributeKeyBuilder_.dispose();
          attributeKeyBuilder_ = null;
        }
        attributeType_ = 1;
        attributeValue_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.attributeKey_ = attributeKeyBuilder_ == null
              ? attributeKey_
              : attributeKeyBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attributeType_ = attributeType_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.attributeValue_ = attributeValue_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance()) return this;
        if (other.hasAttributeKey()) {
          mergeAttributeKey(other.getAttributeKey());
        }
        if (other.hasAttributeType()) {
          setAttributeType(other.getAttributeType());
        }
        if (other.hasAttributeValue()) {
          attributeValue_ = other.attributeValue_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasAttributeKey()) {
          return false;
        }
        if (!getAttributeKey().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getAttributeKeyFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  attributeType_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              case 26: {
                attributeValue_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
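      // Editorial note: case 16 is field 2 with varint wire type. An enum
      // number the runtime does not recognize is preserved through
      // mergeUnknownVarintField rather than being dropped, as handled above.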
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> attributeKeyBuilder_;
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       * @return Whether the attributeKey field is set.
       */
      public boolean hasAttributeKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       * @return The attributeKey.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() {
        if (attributeKeyBuilder_ == null) {
          return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
        } else {
          return attributeKeyBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder setAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (attributeKeyBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          attributeKey_ = value;
        } else {
          attributeKeyBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder setAttributeKey(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (attributeKeyBuilder_ == null) {
          attributeKey_ = builderForValue.build();
        } else {
          attributeKeyBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder mergeAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (attributeKeyBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            attributeKey_ != null &&
            attributeKey_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) {
            getAttributeKeyBuilder().mergeFrom(value);
          } else {
            attributeKey_ = value;
          }
        } else {
          attributeKeyBuilder_.mergeFrom(value);
        }
        if (attributeKey_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder clearAttributeKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        attributeKey_ = null;
        if (attributeKeyBuilder_ != null) {
          attributeKeyBuilder_.dispose();
          attributeKeyBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getAttributeKeyBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAttributeKeyFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() {
        if (attributeKeyBuilder_ != null) {
          return attributeKeyBuilder_.getMessageOrBuilder();
        } else {
          return attributeKey_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
          getAttributeKeyFieldBuilder() {
        if (attributeKeyBuilder_ == null) {
          attributeKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  getAttributeKey(),
                  getParentForChildren(),
                  isClean());
          attributeKey_ = null;
        }
        return attributeKeyBuilder_;
      }
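      // Note: the SingleFieldBuilderV3 above is created lazily. Once it
      // exists it owns the nested message and attributeKey_ is nulled, so
      // exactly one of the two is live; the attributeKey accessors earlier in
      // this builder branch on that invariant.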

      private int attributeType_ = 1;
      /**
       * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
       * @return Whether the attributeType field is set.
       */
      @java.lang.Override public boolean hasAttributeType() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
       * @return The attributeType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result;
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
       * @param value The attributeType to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeType(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        attributeType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeTypeProto attributeType = 2 [default = STRING];</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        attributeType_ = 1;
        onChanged();
        return this;
      }
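      // Note: attributeType_ stores the raw wire number (1 == STRING, the
      // declared default). getAttributeType() above maps it back through
      // forNumber() and falls back to STRING for unrecognized numbers rather
      // than returning null.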

      private java.lang.Object attributeValue_ = "";
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @return Whether the attributeValue field is set.
       */
      public boolean hasAttributeValue() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @return The attributeValue.
       */
      public java.lang.String getAttributeValue() {
        java.lang.Object ref = attributeValue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributeValue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @return The bytes for attributeValue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributeValueBytes() {
        java.lang.Object ref = attributeValue_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributeValue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @param value The attributeValue to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeValue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributeValue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeValue() {
        attributeValue_ = getDefaultInstance().getAttributeValue();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string attributeValue = 3 [default = ""];</code>
       * @param value The bytes for attributeValue to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeValueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributeValue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
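      // Note on setAttributeValueBytes() above: the ByteString is stored
      // without UTF-8 validation; getAttributeValue() decodes lazily and only
      // caches the decoded String when isValidUtf8() holds.
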
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributeProto>() {
      @java.lang.Override
      public NodeAttributeProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
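
  // A minimal usage sketch for NodeAttributeProto, assuming the
  // NodeAttributeKeyProto.Builder setter named below (it is defined with the
  // key message, outside this excerpt); everything else is generated above:
  //
  //   NodeAttributeProto attr = NodeAttributeProto.newBuilder()
  //       .setAttributeKey(NodeAttributeKeyProto.newBuilder()
  //           .setAttributeName("os"))                     // assumed setter
  //       .setAttributeType(NodeAttributeTypeProto.STRING) // optional, default STRING
  //       .setAttributeValue("linux")
  //       .build();          // build() throws if required attributeKey is unset
  //   byte[] wire = attr.toByteArray();
  //   NodeAttributeProto roundTrip = NodeAttributeProto.parseFrom(wire);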

  public interface NodeAttributeInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeAttributeInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return Whether the attributeKey field is set.
     */
    boolean hasAttributeKey();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return The attributeKey.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder();

    /**
     * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
     * @return Whether the attributeType field is set.
     */
    boolean hasAttributeType();
    /**
     * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
     * @return The attributeType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeAttributeInfoProto}
   */
  public static final class NodeAttributeInfoProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeAttributeInfoProto)
      NodeAttributeInfoProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeAttributeInfoProto.newBuilder() to construct.
    private NodeAttributeInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeAttributeInfoProto() {
      attributeType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeAttributeInfoProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder.class);
    }

    private int bitField0_;
    public static final int ATTRIBUTEKEY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_;
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return Whether the attributeKey field is set.
     */
    @java.lang.Override
    public boolean hasAttributeKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     * @return The attributeKey.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() {
      return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() {
      return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
    }

    public static final int ATTRIBUTETYPE_FIELD_NUMBER = 2;
    private int attributeType_ = 1;
    /**
     * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
     * @return Whether the attributeType field is set.
     */
    @java.lang.Override public boolean hasAttributeType() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
     * @return The attributeType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasAttributeKey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasAttributeType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getAttributeKey().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
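
    // How the required-field checks above surface to callers (see build()
    // and buildPartial() in the Builder below):
    //
    //   NodeAttributeInfoProto.Builder b = NodeAttributeInfoProto.newBuilder();
    //   b.isInitialized();  // false: attributeKey and attributeType are unset
    //   b.buildPartial();   // permitted; returns an uninitialized message
    //   b.build();          // throws UninitializedMessageException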

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getAttributeKey());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, attributeType_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getAttributeKey());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, attributeType_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto) obj;

      if (hasAttributeKey() != other.hasAttributeKey()) return false;
      if (hasAttributeKey()) {
        if (!getAttributeKey()
            .equals(other.getAttributeKey())) return false;
      }
      if (hasAttributeType() != other.hasAttributeType()) return false;
      if (hasAttributeType()) {
        if (attributeType_ != other.attributeType_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAttributeKey()) {
        hash = (37 * hash) + ATTRIBUTEKEY_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeKey().hashCode();
      }
      if (hasAttributeType()) {
        hash = (37 * hash) + ATTRIBUTETYPE_FIELD_NUMBER;
        hash = (53 * hash) + attributeType_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeAttributeInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeAttributeInfoProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getAttributeKeyFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        attributeKey_ = null;
        if (attributeKeyBuilder_ != null) {
          attributeKeyBuilder_.dispose();
          attributeKeyBuilder_ = null;
        }
        attributeType_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.attributeKey_ = attributeKeyBuilder_ == null
              ? attributeKey_
              : attributeKeyBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attributeType_ = attributeType_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto.getDefaultInstance()) return this;
        if (other.hasAttributeKey()) {
          mergeAttributeKey(other.getAttributeKey());
        }
        if (other.hasAttributeType()) {
          setAttributeType(other.getAttributeType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasAttributeKey()) {
          return false;
        }
        if (!hasAttributeType()) {
          return false;
        }
        if (!getAttributeKey().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getAttributeKeyFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  attributeType_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto attributeKey_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> attributeKeyBuilder_;
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       * @return Whether the attributeKey field is set.
       */
      public boolean hasAttributeKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       * @return The attributeKey.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getAttributeKey() {
        if (attributeKeyBuilder_ == null) {
          return attributeKey_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
        } else {
          return attributeKeyBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder setAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (attributeKeyBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          attributeKey_ = value;
        } else {
          attributeKeyBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder setAttributeKey(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (attributeKeyBuilder_ == null) {
          attributeKey_ = builderForValue.build();
        } else {
          attributeKeyBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder mergeAttributeKey(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (attributeKeyBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            attributeKey_ != null &&
            attributeKey_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) {
            getAttributeKeyBuilder().mergeFrom(value);
          } else {
            attributeKey_ = value;
          }
        } else {
          attributeKeyBuilder_.mergeFrom(value);
        }
        if (attributeKey_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public Builder clearAttributeKey() {
        bitField0_ = (bitField0_ & ~0x00000001);
        attributeKey_ = null;
        if (attributeKeyBuilder_ != null) {
          attributeKeyBuilder_.dispose();
          attributeKeyBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getAttributeKeyBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getAttributeKeyFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getAttributeKeyOrBuilder() {
        if (attributeKeyBuilder_ != null) {
          return attributeKeyBuilder_.getMessageOrBuilder();
        } else {
          return attributeKey_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : attributeKey_;
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto attributeKey = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
          getAttributeKeyFieldBuilder() {
        if (attributeKeyBuilder_ == null) {
          attributeKeyBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  getAttributeKey(),
                  getParentForChildren(),
                  isClean());
          attributeKey_ = null;
        }
        return attributeKeyBuilder_;
      }

      private int attributeType_ = 1;
      /**
       * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
       * @return Whether the attributeType field is set.
       */
      @java.lang.Override public boolean hasAttributeType() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
       * @return The attributeType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto getAttributeType() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.forNumber(attributeType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto.STRING : result;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
       * @param value The attributeType to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeType(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        attributeType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeTypeProto attributeType = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        attributeType_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeAttributeInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeAttributeInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeAttributeInfoProto>() {
      @java.lang.Override
      public NodeAttributeInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeAttributeInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
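
  // Parsing sketch: every parseFrom overload above funnels through PARSER,
  // which rewraps a missing required field as an
  // InvalidProtocolBufferException that still carries the partial message:
  //
  //   try {
  //     NodeAttributeInfoProto info = NodeAttributeInfoProto.parseFrom(wire);
  //   } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
  //     org.apache.hadoop.thirdparty.protobuf.MessageLite partial = e.getUnfinishedMessage();
  //   }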

  public interface NodeToAttributeValueProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeToAttributeValueProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string hostname = 1;</code>
     * @return Whether the hostname field is set.
     */
    boolean hasHostname();
    /**
     * <code>required string hostname = 1;</code>
     * @return The hostname.
     */
    java.lang.String getHostname();
    /**
     * <code>required string hostname = 1;</code>
     * @return The bytes for hostname.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostnameBytes();

    /**
     * <code>required string attributeValue = 2;</code>
     * @return Whether the attributeValue field is set.
     */
    boolean hasAttributeValue();
    /**
     * <code>required string attributeValue = 2;</code>
     * @return The attributeValue.
     */
    java.lang.String getAttributeValue();
    /**
     * <code>required string attributeValue = 2;</code>
     * @return The bytes for attributeValue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeValueBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeToAttributeValueProto}
   */
  public static final class NodeToAttributeValueProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeToAttributeValueProto)
      NodeToAttributeValueProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeToAttributeValueProto.newBuilder() to construct.
    private NodeToAttributeValueProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeToAttributeValueProto() {
      hostname_ = "";
      attributeValue_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeToAttributeValueProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder.class);
    }

    private int bitField0_;
    public static final int HOSTNAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object hostname_ = "";
    /**
     * <code>required string hostname = 1;</code>
     * @return Whether the hostname field is set.
     */
    @java.lang.Override
    public boolean hasHostname() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string hostname = 1;</code>
     * @return The hostname.
     */
    @java.lang.Override
    public java.lang.String getHostname() {
      java.lang.Object ref = hostname_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          hostname_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string hostname = 1;</code>
     * @return The bytes for hostname.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHostnameBytes() {
      java.lang.Object ref = hostname_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        hostname_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
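
    // Note on the two accessors above: hostname_ holds either the ByteString
    // read off the wire or the decoded java.lang.String. getHostname()
    // decodes lazily and caches the String only when the bytes are valid
    // UTF-8; getHostnameBytes() caches in the opposite direction. The field
    // is volatile so the cached value publishes safely across threads.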

    public static final int ATTRIBUTEVALUE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object attributeValue_ = "";
    /**
     * <code>required string attributeValue = 2;</code>
     * @return Whether the attributeValue field is set.
     */
    @java.lang.Override
    public boolean hasAttributeValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required string attributeValue = 2;</code>
     * @return The attributeValue.
     */
    @java.lang.Override
    public java.lang.String getAttributeValue() {
      java.lang.Object ref = attributeValue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          attributeValue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string attributeValue = 2;</code>
     * @return The bytes for attributeValue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAttributeValueBytes() {
      java.lang.Object ref = attributeValue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        attributeValue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasHostname()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasAttributeValue()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, hostname_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, attributeValue_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, hostname_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, attributeValue_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto) obj;

      if (hasHostname() != other.hasHostname()) return false;
      if (hasHostname()) {
        if (!getHostname()
            .equals(other.getHostname())) return false;
      }
      if (hasAttributeValue() != other.hasAttributeValue()) return false;
      if (hasAttributeValue()) {
        if (!getAttributeValue()
            .equals(other.getAttributeValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasHostname()) {
        hash = (37 * hash) + HOSTNAME_FIELD_NUMBER;
        hash = (53 * hash) + getHostname().hashCode();
      }
      if (hasAttributeValue()) {
        hash = (37 * hash) + ATTRIBUTEVALUE_FIELD_NUMBER;
        hash = (53 * hash) + getAttributeValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
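
    // Note: equals() and hashCode() above incorporate getUnknownFields(), so
    // messages differing only in unrecognized wire data compare unequal; the
    // hash is memoized after the first computation.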

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeToAttributeValueProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeToAttributeValueProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        hostname_ = "";
        attributeValue_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.hostname_ = hostname_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.attributeValue_ = attributeValue_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance()) return this;
        if (other.hasHostname()) {
          hostname_ = other.hostname_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasAttributeValue()) {
          attributeValue_ = other.attributeValue_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasHostname()) {
          return false;
        }
        if (!hasAttributeValue()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
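              // Tag = (field_number << 3) | wire_type; wire type 2 is
              // length-delimited, so hostname (field 1) arrives as tag 10
              // and attributeValue (field 2) as tag 18.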
              case 10: {
                hostname_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                attributeValue_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
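      // bitField0_ tracks builder-side field presence: bit 0x1 = hostname,
      // bit 0x2 = attributeValue.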
      private int bitField0_;

      private java.lang.Object hostname_ = "";
      /**
       * <code>required string hostname = 1;</code>
       * @return Whether the hostname field is set.
       */
      public boolean hasHostname() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string hostname = 1;</code>
       * @return The hostname.
       */
      public java.lang.String getHostname() {
        java.lang.Object ref = hostname_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            hostname_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string hostname = 1;</code>
       * @return The bytes for hostname.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHostnameBytes() {
        java.lang.Object ref = hostname_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          hostname_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string hostname = 1;</code>
       * @param value The hostname to set.
       * @return This builder for chaining.
       */
      public Builder setHostname(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        hostname_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string hostname = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearHostname() {
        hostname_ = getDefaultInstance().getHostname();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string hostname = 1;</code>
       * @param value The bytes for hostname to set.
       * @return This builder for chaining.
       */
      public Builder setHostnameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        hostname_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object attributeValue_ = "";
      /**
       * <code>required string attributeValue = 2;</code>
       * @return Whether the attributeValue field is set.
       */
      public boolean hasAttributeValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required string attributeValue = 2;</code>
       * @return The attributeValue.
       */
      public java.lang.String getAttributeValue() {
        java.lang.Object ref = attributeValue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            attributeValue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string attributeValue = 2;</code>
       * @return The bytes for attributeValue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAttributeValueBytes() {
        java.lang.Object ref = attributeValue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          attributeValue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string attributeValue = 2;</code>
       * @param value The attributeValue to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeValue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        attributeValue_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required string attributeValue = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeValue() {
        attributeValue_ = getDefaultInstance().getAttributeValue();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>required string attributeValue = 2;</code>
       * @param value The bytes for attributeValue to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeValueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        attributeValue_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeToAttributeValueProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeToAttributeValueProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
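
    // Note: direct field access to PARSER below is deprecated; prefer
    // parser() or the static parseFrom overloads.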

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeToAttributeValueProto>() {
      @java.lang.Override
      public NodeToAttributeValueProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributeValueProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
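
  // Illustrative usage sketch (comments only; not generated code): building
  // and round-tripping a NodeToAttributeValueProto. The hostname and value
  // below are hypothetical placeholders.
  //
  //   NodeToAttributeValueProto msg = NodeToAttributeValueProto.newBuilder()
  //       .setHostname("host-1.example.com")
  //       .setAttributeValue("x86_64")
  //       .build();  // both fields are required; build() throws otherwise
  //   byte[] wire = msg.toByteArray();
  //   NodeToAttributeValueProto copy = NodeToAttributeValueProto.parseFrom(wire);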

  public interface AttributeToNodesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.AttributeToNodesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     * @return Whether the nodeAttribute field is set.
     */
    boolean hasNodeAttribute();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     * @return The nodeAttribute.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute();
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> 
        getNodeValueMapList();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    int getNodeValueMapCount();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> 
        getNodeValueMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.AttributeToNodesProto}
   */
  public static final class AttributeToNodesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.AttributeToNodesProto)
      AttributeToNodesProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use AttributeToNodesProto.newBuilder() to construct.
    private AttributeToNodesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private AttributeToNodesProto() {
      nodeValueMap_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new AttributeToNodesProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODEATTRIBUTE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto nodeAttribute_;
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     * @return Whether the nodeAttribute field is set.
     */
    @java.lang.Override
    public boolean hasNodeAttribute() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     * @return The nodeAttribute.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute() {
      return nodeAttribute_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_;
    }
    /**
     * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder() {
      return nodeAttribute_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_;
    }

    public static final int NODEVALUEMAP_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> nodeValueMap_;
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> getNodeValueMapList() {
      return nodeValueMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> 
        getNodeValueMapOrBuilderList() {
      return nodeValueMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    @java.lang.Override
    public int getNodeValueMapCount() {
      return nodeValueMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index) {
      return nodeValueMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder(
        int index) {
      return nodeValueMap_.get(index);
    }

    private byte memoizedIsInitialized = -1;
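    // Caches the result of isInitialized(): -1 = not yet computed, 0 = a
    // required field or sub-message is missing, 1 = fully initialized.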
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasNodeAttribute()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getNodeAttribute().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getNodeValueMapCount(); i++) {
        if (!getNodeValueMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getNodeAttribute());
      }
      for (int i = 0; i < nodeValueMap_.size(); i++) {
        output.writeMessage(2, nodeValueMap_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getNodeAttribute());
      }
      for (int i = 0; i < nodeValueMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, nodeValueMap_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto other = (org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto) obj;

      if (hasNodeAttribute() != other.hasNodeAttribute()) return false;
      if (hasNodeAttribute()) {
        if (!getNodeAttribute()
            .equals(other.getNodeAttribute())) return false;
      }
      if (!getNodeValueMapList()
          .equals(other.getNodeValueMapList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNodeAttribute()) {
        hash = (37 * hash) + NODEATTRIBUTE_FIELD_NUMBER;
        hash = (53 * hash) + getNodeAttribute().hashCode();
      }
      if (getNodeValueMapCount() > 0) {
        hash = (37 * hash) + NODEVALUEMAP_FIELD_NUMBER;
        hash = (53 * hash) + getNodeValueMapList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.AttributeToNodesProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.AttributeToNodesProto)
        org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getNodeAttributeFieldBuilder();
          getNodeValueMapFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        nodeAttribute_ = null;
        if (nodeAttributeBuilder_ != null) {
          nodeAttributeBuilder_.dispose();
          nodeAttributeBuilder_ = null;
        }
        if (nodeValueMapBuilder_ == null) {
          nodeValueMap_ = java.util.Collections.emptyList();
        } else {
          nodeValueMap_ = null;
          nodeValueMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_AttributeToNodesProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result) {
        if (nodeValueMapBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            nodeValueMap_ = java.util.Collections.unmodifiableList(nodeValueMap_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.nodeValueMap_ = nodeValueMap_;
        } else {
          result.nodeValueMap_ = nodeValueMapBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.nodeAttribute_ = nodeAttributeBuilder_ == null
              ? nodeAttribute_
              : nodeAttributeBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto.getDefaultInstance()) return this;
        if (other.hasNodeAttribute()) {
          mergeNodeAttribute(other.getNodeAttribute());
        }
        if (nodeValueMapBuilder_ == null) {
          if (!other.nodeValueMap_.isEmpty()) {
            if (nodeValueMap_.isEmpty()) {
              nodeValueMap_ = other.nodeValueMap_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureNodeValueMapIsMutable();
              nodeValueMap_.addAll(other.nodeValueMap_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeValueMap_.isEmpty()) {
            if (nodeValueMapBuilder_.isEmpty()) {
              nodeValueMapBuilder_.dispose();
              nodeValueMapBuilder_ = null;
              nodeValueMap_ = other.nodeValueMap_;
              bitField0_ = (bitField0_ & ~0x00000002);
              nodeValueMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeValueMapFieldBuilder() : null;
            } else {
              nodeValueMapBuilder_.addAllMessages(other.nodeValueMap_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasNodeAttribute()) {
          return false;
        }
        if (!getNodeAttribute().isInitialized()) {
          return false;
        }
        for (int i = 0; i < getNodeValueMapCount(); i++) {
          if (!getNodeValueMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
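              // Both fields are length-delimited sub-messages (wire type 2):
              // tag 10 = nodeAttribute (field 1), tag 18 = a nodeValueMap
              // entry (field 2).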
              case 10: {
                input.readMessage(
                    getNodeAttributeFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.PARSER,
                        extensionRegistry);
                if (nodeValueMapBuilder_ == null) {
                  ensureNodeValueMapIsMutable();
                  nodeValueMap_.add(m);
                } else {
                  nodeValueMapBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto nodeAttribute_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> nodeAttributeBuilder_;
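      // nodeAttribute_ and nodeAttributeBuilder_ are mutually exclusive:
      // once getNodeAttributeFieldBuilder() is invoked the value lives in
      // the SingleFieldBuilderV3 and nodeAttribute_ is set to null.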
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       * @return Whether the nodeAttribute field is set.
       */
      public boolean hasNodeAttribute() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       * @return The nodeAttribute.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto getNodeAttribute() {
        if (nodeAttributeBuilder_ == null) {
          return nodeAttribute_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_;
        } else {
          return nodeAttributeBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public Builder setNodeAttribute(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          nodeAttribute_ = value;
        } else {
          nodeAttributeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public Builder setNodeAttribute(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder builderForValue) {
        if (nodeAttributeBuilder_ == null) {
          nodeAttribute_ = builderForValue.build();
        } else {
          nodeAttributeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public Builder mergeNodeAttribute(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto value) {
        if (nodeAttributeBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            nodeAttribute_ != null &&
            nodeAttribute_ != org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance()) {
            getNodeAttributeBuilder().mergeFrom(value);
          } else {
            nodeAttribute_ = value;
          }
        } else {
          nodeAttributeBuilder_.mergeFrom(value);
        }
        if (nodeAttribute_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public Builder clearNodeAttribute() {
        bitField0_ = (bitField0_ & ~0x00000001);
        nodeAttribute_ = null;
        if (nodeAttributeBuilder_ != null) {
          nodeAttributeBuilder_.dispose();
          nodeAttributeBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder getNodeAttributeBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getNodeAttributeFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder getNodeAttributeOrBuilder() {
        if (nodeAttributeBuilder_ != null) {
          return nodeAttributeBuilder_.getMessageOrBuilder();
        } else {
          return nodeAttribute_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.getDefaultInstance() : nodeAttribute_;
        }
      }
      /**
       * <code>required .hadoop.yarn.NodeAttributeKeyProto nodeAttribute = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder> 
          getNodeAttributeFieldBuilder() {
        if (nodeAttributeBuilder_ == null) {
          nodeAttributeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder>(
                  getNodeAttribute(),
                  getParentForChildren(),
                  isClean());
          nodeAttribute_ = null;
        }
        return nodeAttributeBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> nodeValueMap_ =
        java.util.Collections.emptyList();
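      // Copy-on-write: bit 0x2 of bitField0_ marks nodeValueMap_ as privately
      // owned. Until it is set the list may alias an immutable list from
      // another message, so mutation first makes a private ArrayList copy.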
      private void ensureNodeValueMapIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          nodeValueMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto>(nodeValueMap_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> nodeValueMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> getNodeValueMapList() {
        if (nodeValueMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeValueMap_);
        } else {
          return nodeValueMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public int getNodeValueMapCount() {
        if (nodeValueMapBuilder_ == null) {
          return nodeValueMap_.size();
        } else {
          return nodeValueMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto getNodeValueMap(int index) {
        if (nodeValueMapBuilder_ == null) {
          return nodeValueMap_.get(index);
        } else {
          return nodeValueMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder setNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) {
        if (nodeValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeValueMapIsMutable();
          nodeValueMap_.set(index, value);
          onChanged();
        } else {
          nodeValueMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder setNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          nodeValueMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeValueMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder addNodeValueMap(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) {
        if (nodeValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeValueMapIsMutable();
          nodeValueMap_.add(value);
          onChanged();
        } else {
          nodeValueMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder addNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto value) {
        if (nodeValueMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeValueMapIsMutable();
          nodeValueMap_.add(index, value);
          onChanged();
        } else {
          nodeValueMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder addNodeValueMap(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          nodeValueMap_.add(builderForValue.build());
          onChanged();
        } else {
          nodeValueMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder addNodeValueMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder builderForValue) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          nodeValueMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeValueMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder addAllNodeValueMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto> values) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeValueMap_);
          onChanged();
        } else {
          nodeValueMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder clearNodeValueMap() {
        if (nodeValueMapBuilder_ == null) {
          nodeValueMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          nodeValueMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public Builder removeNodeValueMap(int index) {
        if (nodeValueMapBuilder_ == null) {
          ensureNodeValueMapIsMutable();
          nodeValueMap_.remove(index);
          onChanged();
        } else {
          nodeValueMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder getNodeValueMapBuilder(
          int index) {
        return getNodeValueMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder getNodeValueMapOrBuilder(
          int index) {
        if (nodeValueMapBuilder_ == null) {
          return nodeValueMap_.get(index);
        } else {
          return nodeValueMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> 
           getNodeValueMapOrBuilderList() {
        if (nodeValueMapBuilder_ != null) {
          return nodeValueMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeValueMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder addNodeValueMapBuilder() {
        return getNodeValueMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder addNodeValueMapBuilder(
          int index) {
        return getNodeValueMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeToAttributeValueProto nodeValueMap = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder> 
           getNodeValueMapBuilderList() {
        return getNodeValueMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder> 
          getNodeValueMapFieldBuilder() {
        if (nodeValueMapBuilder_ == null) {
          nodeValueMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder>(
                  nodeValueMap_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          nodeValueMap_ = null;
        }
        return nodeValueMapBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.AttributeToNodesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.AttributeToNodesProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<AttributeToNodesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<AttributeToNodesProto>() {
      @java.lang.Override
      public AttributeToNodesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<AttributeToNodesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<AttributeToNodesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
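
  // Illustrative usage sketch (comments only; not generated code). An
  // AttributeToNodesProto pairs one required NodeAttributeKeyProto with a
  // repeated list of NodeToAttributeValueProto entries. Values are
  // hypothetical, and setAttributeName is assumed from the
  // NodeAttributeKeyProto schema:
  //
  //   AttributeToNodesProto msg = AttributeToNodesProto.newBuilder()
  //       .setNodeAttribute(NodeAttributeKeyProto.newBuilder()
  //           .setAttributeName("os-arch"))
  //       .addNodeValueMap(NodeToAttributeValueProto.newBuilder()
  //           .setHostname("host-1.example.com")
  //           .setAttributeValue("x86_64"))
  //       .build();  // throws if any required field is missing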

  public interface NodeToAttributesProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeToAttributesProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string node = 1;</code>
     * @return Whether the node field is set.
     */
    boolean hasNode();
    /**
     * <code>optional string node = 1;</code>
     * @return The node.
     */
    java.lang.String getNode();
    /**
     * <code>optional string node = 1;</code>
     * @return The bytes for node.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeBytes();

    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> 
        getNodeAttributesList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    int getNodeAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
        getNodeAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeToAttributesProto}
   */
  public static final class NodeToAttributesProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeToAttributesProto)
      NodeToAttributesProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeToAttributesProto.newBuilder() to construct.
    private NodeToAttributesProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeToAttributesProto() {
      node_ = "";
      nodeAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeToAttributesProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder.class);
    }

    private int bitField0_;
    public static final int NODE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object node_ = "";
    /**
     * <code>optional string node = 1;</code>
     * @return Whether the node field is set.
     */
    @java.lang.Override
    public boolean hasNode() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string node = 1;</code>
     * @return The node.
     */
    @java.lang.Override
    public java.lang.String getNode() {
      java.lang.Object ref = node_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          node_ = s;
        }
        return s;
      }
    }
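
    // Note (not protoc-generated): node_ holds either a String or a ByteString.
    // getNode() caches the decoded String back into node_ only when the bytes
    // are valid UTF-8, so repeated calls on malformed input re-decode rather
    // than cache a lossy value.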
    /**
     * <code>optional string node = 1;</code>
     * @return The bytes for node.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeBytes() {
      java.lang.Object ref = node_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        node_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int NODEATTRIBUTES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_;
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
        getNodeAttributesOrBuilderList() {
      return nodeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    @java.lang.Override
    public int getNodeAttributesCount() {
      return nodeAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
      return nodeAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
        int index) {
      return nodeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getNodeAttributesCount(); i++) {
        if (!getNodeAttributes(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
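
    // Note (not protoc-generated): memoizedIsInitialized encodes three states:
    // -1 (not yet computed), 0 (known false) and 1 (known true). The per-element
    // loop above is only emitted for element types that may transitively carry
    // required fields, so each NodeAttributeProto must be checked once.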

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, node_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        output.writeMessage(2, nodeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, node_);
      }
      for (int i = 0; i < nodeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, nodeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto other = (org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto) obj;

      if (hasNode() != other.hasNode()) return false;
      if (hasNode()) {
        if (!getNode()
            .equals(other.getNode())) return false;
      }
      if (!getNodeAttributesList()
          .equals(other.getNodeAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNode()) {
        hash = (37 * hash) + NODE_FIELD_NUMBER;
        hash = (53 * hash) + getNode().hashCode();
      }
      if (getNodeAttributesCount() > 0) {
        hash = (37 * hash) + NODEATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getNodeAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
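
    // Note (not protoc-generated): memoizedHashCode treats 0 as "not yet
    // computed"; a message whose real hash happens to be 0 is simply recomputed
    // on every call, which is harmless but worth knowing when profiling.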

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeToAttributesProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeToAttributesProto)
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.class, org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        node_ = "";
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
        } else {
          nodeAttributes_ = null;
          nodeAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result) {
        if (nodeAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            nodeAttributes_ = java.util.Collections.unmodifiableList(nodeAttributes_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.nodeAttributes_ = nodeAttributes_;
        } else {
          result.nodeAttributes_ = nodeAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.node_ = node_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }
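
      // Note (not protoc-generated): buildPartial() splits its work:
      // buildPartialRepeatedFields() freezes the repeated list (or delegates to
      // the field builder), while buildPartial0() copies singular fields and
      // their has-bits, guarded by bitField0_ so untouched fields stay default.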

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto.getDefaultInstance()) return this;
        if (other.hasNode()) {
          node_ = other.node_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (nodeAttributesBuilder_ == null) {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributes_.isEmpty()) {
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureNodeAttributesIsMutable();
              nodeAttributes_.addAll(other.nodeAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.nodeAttributes_.isEmpty()) {
            if (nodeAttributesBuilder_.isEmpty()) {
              nodeAttributesBuilder_.dispose();
              nodeAttributesBuilder_ = null;
              nodeAttributes_ = other.nodeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000002);
              nodeAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getNodeAttributesFieldBuilder() : null;
            } else {
              nodeAttributesBuilder_.addAllMessages(other.nodeAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getNodeAttributesCount(); i++) {
          if (!getNodeAttributes(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                node_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.PARSER,
                        extensionRegistry);
                if (nodeAttributesBuilder_ == null) {
                  ensureNodeAttributesIsMutable();
                  nodeAttributes_.add(m);
                } else {
                  nodeAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
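
      // Note on the raw case labels above (standard protobuf wire format, not
      // specific to this file): a tag is (field_number << 3) | wire_type, so
      // the length-delimited (wire type 2) fields decode as
      // (1 << 3) | 2 = 10 for node and (2 << 3) | 2 = 18 for nodeAttributes.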
      private int bitField0_;

      private java.lang.Object node_ = "";
      /**
       * <code>optional string node = 1;</code>
       * @return Whether the node field is set.
       */
      public boolean hasNode() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string node = 1;</code>
       * @return The node.
       */
      public java.lang.String getNode() {
        java.lang.Object ref = node_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            node_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string node = 1;</code>
       * @return The bytes for node.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeBytes() {
        java.lang.Object ref = node_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          node_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string node = 1;</code>
       * @param value The node to set.
       * @return This builder for chaining.
       */
      public Builder setNode(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        node_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string node = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNode() {
        node_ = getDefaultInstance().getNode();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string node = 1;</code>
       * @param value The bytes for node to set.
       * @return This builder for chaining.
       */
      public Builder setNodeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        node_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> nodeAttributes_ =
        java.util.Collections.emptyList();
      private void ensureNodeAttributesIsMutable() {
        if ((bitField0_ & 0x00000002) == 0) {
          nodeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto>(nodeAttributes_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> nodeAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> getNodeAttributesList() {
        if (nodeAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        } else {
          return nodeAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public int getNodeAttributesCount() {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.size();
        } else {
          return nodeAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto getNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder setNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder addNodeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto value) {
        if (nodeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, value);
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder addNodeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder addNodeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder builderForValue) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          nodeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder addAllNodeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto> values) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, nodeAttributes_);
          onChanged();
        } else {
          nodeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder clearNodeAttributes() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          nodeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public Builder removeNodeAttributes(int index) {
        if (nodeAttributesBuilder_ == null) {
          ensureNodeAttributesIsMutable();
          nodeAttributes_.remove(index);
          onChanged();
        } else {
          nodeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder getNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder getNodeAttributesOrBuilder(
          int index) {
        if (nodeAttributesBuilder_ == null) {
          return nodeAttributes_.get(index);
        } else {
          return nodeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
           getNodeAttributesOrBuilderList() {
        if (nodeAttributesBuilder_ != null) {
          return nodeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(nodeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder() {
        return getNodeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder addNodeAttributesBuilder(
          int index) {
        return getNodeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.NodeAttributeProto nodeAttributes = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder> 
           getNodeAttributesBuilderList() {
        return getNodeAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder> 
          getNodeAttributesFieldBuilder() {
        if (nodeAttributesBuilder_ == null) {
          nodeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder>(
                  nodeAttributes_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          nodeAttributes_ = null;
        }
        return nodeAttributesBuilder_;
      }
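
      // Note (not protoc-generated): once getNodeAttributesFieldBuilder() is
      // called, ownership of the repeated field moves to the field builder and
      // nodeAttributes_ is nulled out; every accessor above therefore branches
      // on nodeAttributesBuilder_ == null to pick the live representation.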
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeToAttributesProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeToAttributesProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeToAttributesProto>() {
      @java.lang.Override
      public NodeToAttributesProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeToAttributesProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
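
  // Illustrative only (not protoc-generated): a minimal sketch of the usual
  // build/serialize/parse round trip for NodeToAttributesProto. The node name
  // is a hypothetical value, and the example assumes NodeAttributeProto's
  // default instance is initialized (all of its fields optional).
  private static NodeToAttributesProto roundTripNodeToAttributesExample()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    NodeToAttributesProto msg = NodeToAttributesProto.newBuilder()
        .setNode("host-1.example.com")                               // field 1
        .addNodeAttributes(NodeAttributeProto.getDefaultInstance())  // field 2
        .build();
    // parseFrom(byte[]) delegates to the PARSER defined above.
    return NodeToAttributesProto.parseFrom(msg.toByteArray());
  }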

  public interface DeregisterSubClustersProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.DeregisterSubClustersProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string subClusterId = 1;</code>
     * @return Whether the subClusterId field is set.
     */
    boolean hasSubClusterId();
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The subClusterId.
     */
    java.lang.String getSubClusterId();
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The bytes for subClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes();

    /**
     * <code>optional string deregisterState = 2;</code>
     * @return Whether the deregisterState field is set.
     */
    boolean hasDeregisterState();
    /**
     * <code>optional string deregisterState = 2;</code>
     * @return The deregisterState.
     */
    java.lang.String getDeregisterState();
    /**
     * <code>optional string deregisterState = 2;</code>
     * @return The bytes for deregisterState.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDeregisterStateBytes();

    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return Whether the lastHeartBeatTime field is set.
     */
    boolean hasLastHeartBeatTime();
    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return The lastHeartBeatTime.
     */
    java.lang.String getLastHeartBeatTime();
    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return The bytes for lastHeartBeatTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLastHeartBeatTimeBytes();

    /**
     * <code>optional string information = 4;</code>
     * @return Whether the information field is set.
     */
    boolean hasInformation();
    /**
     * <code>optional string information = 4;</code>
     * @return The information.
     */
    java.lang.String getInformation();
    /**
     * <code>optional string information = 4;</code>
     * @return The bytes for information.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getInformationBytes();

    /**
     * <code>optional string subClusterState = 5;</code>
     * @return Whether the subClusterState field is set.
     */
    boolean hasSubClusterState();
    /**
     * <code>optional string subClusterState = 5;</code>
     * @return The subClusterState.
     */
    java.lang.String getSubClusterState();
    /**
     * <code>optional string subClusterState = 5;</code>
     * @return The bytes for subClusterState.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterStateBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.DeregisterSubClustersProto}
   */
  public static final class DeregisterSubClustersProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.DeregisterSubClustersProto)
      DeregisterSubClustersProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use DeregisterSubClustersProto.newBuilder() to construct.
    private DeregisterSubClustersProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private DeregisterSubClustersProto() {
      subClusterId_ = "";
      deregisterState_ = "";
      lastHeartBeatTime_ = "";
      information_ = "";
      subClusterState_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new DeregisterSubClustersProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.class, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder.class);
    }

    private int bitField0_;
    public static final int SUBCLUSTERID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterId_ = "";
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return Whether the subClusterId field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The subClusterId.
     */
    @java.lang.Override
    public java.lang.String getSubClusterId() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The bytes for subClusterId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DEREGISTERSTATE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object deregisterState_ = "";
    /**
     * <code>optional string deregisterState = 2;</code>
     * @return Whether the deregisterState field is set.
     */
    @java.lang.Override
    public boolean hasDeregisterState() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string deregisterState = 2;</code>
     * @return The deregisterState.
     */
    @java.lang.Override
    public java.lang.String getDeregisterState() {
      java.lang.Object ref = deregisterState_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          deregisterState_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string deregisterState = 2;</code>
     * @return The bytes for deregisterState.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDeregisterStateBytes() {
      java.lang.Object ref = deregisterState_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        deregisterState_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LASTHEARTBEATTIME_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object lastHeartBeatTime_ = "";
    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return Whether the lastHeartBeatTime field is set.
     */
    @java.lang.Override
    public boolean hasLastHeartBeatTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return The lastHeartBeatTime.
     */
    @java.lang.Override
    public java.lang.String getLastHeartBeatTime() {
      java.lang.Object ref = lastHeartBeatTime_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          lastHeartBeatTime_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string lastHeartBeatTime = 3;</code>
     * @return The bytes for lastHeartBeatTime.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLastHeartBeatTimeBytes() {
      java.lang.Object ref = lastHeartBeatTime_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        lastHeartBeatTime_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int INFORMATION_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object information_ = "";
    /**
     * <code>optional string information = 4;</code>
     * @return Whether the information field is set.
     */
    @java.lang.Override
    public boolean hasInformation() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string information = 4;</code>
     * @return The information.
     */
    @java.lang.Override
    public java.lang.String getInformation() {
      java.lang.Object ref = information_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          information_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string information = 4;</code>
     * @return The bytes for information.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getInformationBytes() {
      java.lang.Object ref = information_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        information_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SUBCLUSTERSTATE_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterState_ = "";
    /**
     * <code>optional string subClusterState = 5;</code>
     * @return Whether the subClusterState field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterState() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string subClusterState = 5;</code>
     * @return The subClusterState.
     */
    @java.lang.Override
    public java.lang.String getSubClusterState() {
      java.lang.Object ref = subClusterState_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterState_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterState = 5;</code>
     * @return The bytes for subClusterState.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterStateBytes() {
      java.lang.Object ref = subClusterState_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterState_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
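
    // Note (not protoc-generated): unlike NodeToAttributesProto above, this
    // message carries only optional scalar strings, so isInitialized() has no
    // per-field checks and can memoize true immediately.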

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, deregisterState_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, lastHeartBeatTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, information_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, subClusterState_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, deregisterState_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, lastHeartBeatTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, information_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, subClusterState_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto other = (org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto) obj;

      if (hasSubClusterId() != other.hasSubClusterId()) return false;
      if (hasSubClusterId()) {
        if (!getSubClusterId()
            .equals(other.getSubClusterId())) return false;
      }
      if (hasDeregisterState() != other.hasDeregisterState()) return false;
      if (hasDeregisterState()) {
        if (!getDeregisterState()
            .equals(other.getDeregisterState())) return false;
      }
      if (hasLastHeartBeatTime() != other.hasLastHeartBeatTime()) return false;
      if (hasLastHeartBeatTime()) {
        if (!getLastHeartBeatTime()
            .equals(other.getLastHeartBeatTime())) return false;
      }
      if (hasInformation() != other.hasInformation()) return false;
      if (hasInformation()) {
        if (!getInformation()
            .equals(other.getInformation())) return false;
      }
      if (hasSubClusterState() != other.hasSubClusterState()) return false;
      if (hasSubClusterState()) {
        if (!getSubClusterState()
            .equals(other.getSubClusterState())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSubClusterId()) {
        hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterId().hashCode();
      }
      if (hasDeregisterState()) {
        hash = (37 * hash) + DEREGISTERSTATE_FIELD_NUMBER;
        hash = (53 * hash) + getDeregisterState().hashCode();
      }
      if (hasLastHeartBeatTime()) {
        hash = (37 * hash) + LASTHEARTBEATTIME_FIELD_NUMBER;
        hash = (53 * hash) + getLastHeartBeatTime().hashCode();
      }
      if (hasInformation()) {
        hash = (37 * hash) + INFORMATION_FIELD_NUMBER;
        hash = (53 * hash) + getInformation().hashCode();
      }
      if (hasSubClusterState()) {
        hash = (37 * hash) + SUBCLUSTERSTATE_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterState().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
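    // Streaming sketch (hypothetical in/out streams): parseDelimitedFrom pairs
    // with writeDelimitedTo, which length-prefixes each message so several can
    // share a single stream; it returns null once the stream is exhausted.
    //
    //   msg.writeDelimitedTo(out);
    //   DeregisterSubClustersProto next =
    //       DeregisterSubClustersProto.parseDelimitedFrom(in);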

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.DeregisterSubClustersProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.DeregisterSubClustersProto)
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.class, org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        subClusterId_ = "";
        deregisterState_ = "";
        lastHeartBeatTime_ = "";
        information_ = "";
        subClusterState_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result = new org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }
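      // buildPartial0 copies only the builder fields whose presence bits are
      // set, accumulating the matching bits and OR-ing them into the result's
      // bitField0_ in a single step at the end.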

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.subClusterId_ = subClusterId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.deregisterState_ = deregisterState_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.lastHeartBeatTime_ = lastHeartBeatTime_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.information_ = information_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.subClusterState_ = subClusterState_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto.getDefaultInstance()) return this;
        if (other.hasSubClusterId()) {
          subClusterId_ = other.subClusterId_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasDeregisterState()) {
          deregisterState_ = other.deregisterState_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasLastHeartBeatTime()) {
          lastHeartBeatTime_ = other.lastHeartBeatTime_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasInformation()) {
          information_ = other.information_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasSubClusterState()) {
          subClusterState_ = other.subClusterState_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
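              // Each tag is (field_number << 3) | wire_type. All five fields
              // are length-delimited strings (wire type 2), so field 1 arrives
              // as tag 10, field 2 as 18, ... field 5 as 42; tag 0 means the
              // end of the input.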
              case 0:
                done = true;
                break;
              case 10: {
                subClusterId_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                deregisterState_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                lastHeartBeatTime_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                information_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                subClusterState_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
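      // Presence bits for this builder: 0x01 subClusterId, 0x02 deregisterState,
      // 0x04 lastHeartBeatTime, 0x08 information, 0x10 subClusterState.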

      private java.lang.Object subClusterId_ = "";
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return Whether the subClusterId field is set.
       */
      public boolean hasSubClusterId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return The subClusterId.
       */
      public java.lang.String getSubClusterId() {
        java.lang.Object ref = subClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return The bytes for subClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterIdBytes() {
        java.lang.Object ref = subClusterId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @param value The subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterId() {
        subClusterId_ = getDefaultInstance().getSubClusterId();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @param value The bytes for subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object deregisterState_ = "";
      /**
       * <code>optional string deregisterState = 2;</code>
       * @return Whether the deregisterState field is set.
       */
      public boolean hasDeregisterState() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string deregisterState = 2;</code>
       * @return The deregisterState.
       */
      public java.lang.String getDeregisterState() {
        java.lang.Object ref = deregisterState_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            deregisterState_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string deregisterState = 2;</code>
       * @return The bytes for deregisterState.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDeregisterStateBytes() {
        java.lang.Object ref = deregisterState_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          deregisterState_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string deregisterState = 2;</code>
       * @param value The deregisterState to set.
       * @return This builder for chaining.
       */
      public Builder setDeregisterState(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        deregisterState_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string deregisterState = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearDeregisterState() {
        deregisterState_ = getDefaultInstance().getDeregisterState();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string deregisterState = 2;</code>
       * @param value The bytes for deregisterState to set.
       * @return This builder for chaining.
       */
      public Builder setDeregisterStateBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        deregisterState_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object lastHeartBeatTime_ = "";
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @return Whether the lastHeartBeatTime field is set.
       */
      public boolean hasLastHeartBeatTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @return The lastHeartBeatTime.
       */
      public java.lang.String getLastHeartBeatTime() {
        java.lang.Object ref = lastHeartBeatTime_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            lastHeartBeatTime_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @return The bytes for lastHeartBeatTime.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLastHeartBeatTimeBytes() {
        java.lang.Object ref = lastHeartBeatTime_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          lastHeartBeatTime_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @param value The lastHeartBeatTime to set.
       * @return This builder for chaining.
       */
      public Builder setLastHeartBeatTime(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        lastHeartBeatTime_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearLastHeartBeatTime() {
        lastHeartBeatTime_ = getDefaultInstance().getLastHeartBeatTime();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string lastHeartBeatTime = 3;</code>
       * @param value The bytes for lastHeartBeatTime to set.
       * @return This builder for chaining.
       */
      public Builder setLastHeartBeatTimeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        lastHeartBeatTime_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.lang.Object information_ = "";
      /**
       * <code>optional string information = 4;</code>
       * @return Whether the information field is set.
       */
      public boolean hasInformation() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string information = 4;</code>
       * @return The information.
       */
      public java.lang.String getInformation() {
        java.lang.Object ref = information_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            information_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string information = 4;</code>
       * @return The bytes for information.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getInformationBytes() {
        java.lang.Object ref = information_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          information_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string information = 4;</code>
       * @param value The information to set.
       * @return This builder for chaining.
       */
      public Builder setInformation(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        information_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string information = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearInformation() {
        information_ = getDefaultInstance().getInformation();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string information = 4;</code>
       * @param value The bytes for information to set.
       * @return This builder for chaining.
       */
      public Builder setInformationBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        information_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object subClusterState_ = "";
      /**
       * <code>optional string subClusterState = 5;</code>
       * @return Whether the subClusterState field is set.
       */
      public boolean hasSubClusterState() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string subClusterState = 5;</code>
       * @return The subClusterState.
       */
      public java.lang.String getSubClusterState() {
        java.lang.Object ref = subClusterState_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterState_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterState = 5;</code>
       * @return The bytes for subClusterState.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterStateBytes() {
        java.lang.Object ref = subClusterState_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterState_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterState = 5;</code>
       * @param value The subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterState(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterState = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterState() {
        subClusterState_ = getDefaultInstance().getSubClusterState();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterState = 5;</code>
       * @param value The bytes for subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterStateBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.DeregisterSubClustersProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.DeregisterSubClustersProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
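    // Direct use of the PARSER field is deprecated; prefer parser() or the
    // static parseFrom/parseDelimitedFrom overloads above.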

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeregisterSubClustersProto>() {
      @java.lang.Override
      public DeregisterSubClustersProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<DeregisterSubClustersProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.DeregisterSubClustersProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
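  // Builder usage sketch for the class above (hypothetical field values):
  //
  //   YarnProtos.DeregisterSubClustersProto msg =
  //       YarnProtos.DeregisterSubClustersProto.newBuilder()
  //           .setSubClusterId("SC-1")
  //           .setDeregisterState("SC_LOST")
  //           .build();
  //   byte[] bytes = msg.toByteArray();
  //   YarnProtos.DeregisterSubClustersProto parsed =
  //       YarnProtos.DeregisterSubClustersProto.parseFrom(bytes);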

  public interface FederationQueueWeightProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FederationQueueWeightProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string routerWeight = 1;</code>
     * @return Whether the routerWeight field is set.
     */
    boolean hasRouterWeight();
    /**
     * <code>optional string routerWeight = 1;</code>
     * @return The routerWeight.
     */
    java.lang.String getRouterWeight();
    /**
     * <code>optional string routerWeight = 1;</code>
     * @return The bytes for routerWeight.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRouterWeightBytes();

    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return Whether the amrmWeight field is set.
     */
    boolean hasAmrmWeight();
    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return The amrmWeight.
     */
    java.lang.String getAmrmWeight();
    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return The bytes for amrmWeight.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAmrmWeightBytes();

    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return Whether the headRoomAlpha field is set.
     */
    boolean hasHeadRoomAlpha();
    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return The headRoomAlpha.
     */
    java.lang.String getHeadRoomAlpha();
    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return The bytes for headRoomAlpha.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getHeadRoomAlphaBytes();

    /**
     * <code>optional string queue = 4;</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 4;</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 4;</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return Whether the policyManagerClassName field is set.
     */
    boolean hasPolicyManagerClassName();
    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return The policyManagerClassName.
     */
    java.lang.String getPolicyManagerClassName();
    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return The bytes for policyManagerClassName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPolicyManagerClassNameBytes();
  }
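  // The OrBuilder interface above exposes the read-side accessors shared by
  // FederationQueueWeightProto and its Builder, so callers can accept either.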
  /**
   * Protobuf type {@code hadoop.yarn.FederationQueueWeightProto}
   */
  public static final class FederationQueueWeightProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FederationQueueWeightProto)
      FederationQueueWeightProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FederationQueueWeightProto.newBuilder() to construct.
    private FederationQueueWeightProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FederationQueueWeightProto() {
      routerWeight_ = "";
      amrmWeight_ = "";
      headRoomAlpha_ = "";
      queue_ = "";
      policyManagerClassName_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FederationQueueWeightProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder.class);
    }

    private int bitField0_;
    public static final int ROUTERWEIGHT_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object routerWeight_ = "";
    /**
     * <code>optional string routerWeight = 1;</code>
     * @return Whether the routerWeight field is set.
     */
    @java.lang.Override
    public boolean hasRouterWeight() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string routerWeight = 1;</code>
     * @return The routerWeight.
     */
    @java.lang.Override
    public java.lang.String getRouterWeight() {
      java.lang.Object ref = routerWeight_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          routerWeight_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string routerWeight = 1;</code>
     * @return The bytes for routerWeight.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRouterWeightBytes() {
      java.lang.Object ref = routerWeight_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        routerWeight_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int AMRMWEIGHT_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object amrmWeight_ = "";
    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return Whether the amrmWeight field is set.
     */
    @java.lang.Override
    public boolean hasAmrmWeight() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return The amrmWeight.
     */
    @java.lang.Override
    public java.lang.String getAmrmWeight() {
      java.lang.Object ref = amrmWeight_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          amrmWeight_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string amrmWeight = 2;</code>
     * @return The bytes for amrmWeight.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAmrmWeightBytes() {
      java.lang.Object ref = amrmWeight_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        amrmWeight_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int HEADROOMALPHA_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object headRoomAlpha_ = "";
    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return Whether the headRoomAlpha field is set.
     */
    @java.lang.Override
    public boolean hasHeadRoomAlpha() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return The headRoomAlpha.
     */
    @java.lang.Override
    public java.lang.String getHeadRoomAlpha() {
      java.lang.Object ref = headRoomAlpha_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          headRoomAlpha_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string headRoomAlpha = 3;</code>
     * @return The bytes for headRoomAlpha.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getHeadRoomAlphaBytes() {
      java.lang.Object ref = headRoomAlpha_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        headRoomAlpha_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int QUEUE_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "";
    /**
     * <code>optional string queue = 4;</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string queue = 4;</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 4;</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int POLICYMANAGERCLASSNAME_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object policyManagerClassName_ = "";
    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return Whether the policyManagerClassName field is set.
     */
    @java.lang.Override
    public boolean hasPolicyManagerClassName() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return The policyManagerClassName.
     */
    @java.lang.Override
    public java.lang.String getPolicyManagerClassName() {
      java.lang.Object ref = policyManagerClassName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          policyManagerClassName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string policyManagerClassName = 5;</code>
     * @return The bytes for policyManagerClassName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getPolicyManagerClassNameBytes() {
      java.lang.Object ref = policyManagerClassName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        policyManagerClassName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, routerWeight_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, amrmWeight_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, headRoomAlpha_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, queue_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, policyManagerClassName_);
      }
      getUnknownFields().writeTo(output);
    }
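    // getSerializedSize() below walks the same presence bits as writeTo and
    // caches the computed size in memoizedSize (-1 marks "not yet computed").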

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, routerWeight_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, amrmWeight_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, headRoomAlpha_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, queue_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, policyManagerClassName_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto other = (org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto) obj;

      if (hasRouterWeight() != other.hasRouterWeight()) return false;
      if (hasRouterWeight()) {
        if (!getRouterWeight()
            .equals(other.getRouterWeight())) return false;
      }
      if (hasAmrmWeight() != other.hasAmrmWeight()) return false;
      if (hasAmrmWeight()) {
        if (!getAmrmWeight()
            .equals(other.getAmrmWeight())) return false;
      }
      if (hasHeadRoomAlpha() != other.hasHeadRoomAlpha()) return false;
      if (hasHeadRoomAlpha()) {
        if (!getHeadRoomAlpha()
            .equals(other.getHeadRoomAlpha())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasPolicyManagerClassName() != other.hasPolicyManagerClassName()) return false;
      if (hasPolicyManagerClassName()) {
        if (!getPolicyManagerClassName()
            .equals(other.getPolicyManagerClassName())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasRouterWeight()) {
        hash = (37 * hash) + ROUTERWEIGHT_FIELD_NUMBER;
        hash = (53 * hash) + getRouterWeight().hashCode();
      }
      if (hasAmrmWeight()) {
        hash = (37 * hash) + AMRMWEIGHT_FIELD_NUMBER;
        hash = (53 * hash) + getAmrmWeight().hashCode();
      }
      if (hasHeadRoomAlpha()) {
        hash = (37 * hash) + HEADROOMALPHA_FIELD_NUMBER;
        hash = (53 * hash) + getHeadRoomAlpha().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasPolicyManagerClassName()) {
        hash = (37 * hash) + POLICYMANAGERCLASSNAME_FIELD_NUMBER;
        hash = (53 * hash) + getPolicyManagerClassName().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FederationQueueWeightProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FederationQueueWeightProto)
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        routerWeight_ = "";
        amrmWeight_ = "";
        headRoomAlpha_ = "";
        queue_ = "";
        policyManagerClassName_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result = new org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.routerWeight_ = routerWeight_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.amrmWeight_ = amrmWeight_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.headRoomAlpha_ = headRoomAlpha_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.policyManagerClassName_ = policyManagerClassName_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto.getDefaultInstance()) return this;
        if (other.hasRouterWeight()) {
          routerWeight_ = other.routerWeight_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasAmrmWeight()) {
          amrmWeight_ = other.amrmWeight_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasHeadRoomAlpha()) {
          headRoomAlpha_ = other.headRoomAlpha_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasPolicyManagerClassName()) {
          policyManagerClassName_ = other.policyManagerClassName_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
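            // Each tag packs (field_number << 3) | wire_type, so the
            // length-delimited (wire type 2) string fields 1..5 decode to
            // the tags 10, 18, 26, 34 and 42 handled below.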
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                routerWeight_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                amrmWeight_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                headRoomAlpha_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                policyManagerClassName_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object routerWeight_ = "";
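      // routerWeight_ holds either the raw ByteString read off the wire or
      // the decoded java.lang.String; getRouterWeight() decodes lazily and
      // caches the String only when the bytes are valid UTF-8.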
      /**
       * <code>optional string routerWeight = 1;</code>
       * @return Whether the routerWeight field is set.
       */
      public boolean hasRouterWeight() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string routerWeight = 1;</code>
       * @return The routerWeight.
       */
      public java.lang.String getRouterWeight() {
        java.lang.Object ref = routerWeight_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            routerWeight_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string routerWeight = 1;</code>
       * @return The bytes for routerWeight.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRouterWeightBytes() {
        java.lang.Object ref = routerWeight_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          routerWeight_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string routerWeight = 1;</code>
       * @param value The routerWeight to set.
       * @return This builder for chaining.
       */
      public Builder setRouterWeight(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        routerWeight_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string routerWeight = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearRouterWeight() {
        routerWeight_ = getDefaultInstance().getRouterWeight();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string routerWeight = 1;</code>
       * @param value The bytes for routerWeight to set.
       * @return This builder for chaining.
       */
      public Builder setRouterWeightBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        routerWeight_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object amrmWeight_ = "";
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @return Whether the amrmWeight field is set.
       */
      public boolean hasAmrmWeight() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @return The amrmWeight.
       */
      public java.lang.String getAmrmWeight() {
        java.lang.Object ref = amrmWeight_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            amrmWeight_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @return The bytes for amrmWeight.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAmrmWeightBytes() {
        java.lang.Object ref = amrmWeight_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          amrmWeight_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @param value The amrmWeight to set.
       * @return This builder for chaining.
       */
      public Builder setAmrmWeight(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        amrmWeight_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAmrmWeight() {
        amrmWeight_ = getDefaultInstance().getAmrmWeight();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string amrmWeight = 2;</code>
       * @param value The bytes for amrmWeight to set.
       * @return This builder for chaining.
       */
      public Builder setAmrmWeightBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        amrmWeight_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object headRoomAlpha_ = "";
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @return Whether the headRoomAlpha field is set.
       */
      public boolean hasHeadRoomAlpha() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @return The headRoomAlpha.
       */
      public java.lang.String getHeadRoomAlpha() {
        java.lang.Object ref = headRoomAlpha_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            headRoomAlpha_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @return The bytes for headRoomAlpha.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getHeadRoomAlphaBytes() {
        java.lang.Object ref = headRoomAlpha_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          headRoomAlpha_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @param value The headRoomAlpha to set.
       * @return This builder for chaining.
       */
      public Builder setHeadRoomAlpha(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        headRoomAlpha_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearHeadRoomAlpha() {
        headRoomAlpha_ = getDefaultInstance().getHeadRoomAlpha();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string headRoomAlpha = 3;</code>
       * @param value The bytes for headRoomAlpha to set.
       * @return This builder for chaining.
       */
      public Builder setHeadRoomAlphaBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        headRoomAlpha_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.lang.Object queue_ = "";
      /**
       * <code>optional string queue = 4;</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string queue = 4;</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 4;</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 4;</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 4;</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object policyManagerClassName_ = "";
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @return Whether the policyManagerClassName field is set.
       */
      public boolean hasPolicyManagerClassName() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @return The policyManagerClassName.
       */
      public java.lang.String getPolicyManagerClassName() {
        java.lang.Object ref = policyManagerClassName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            policyManagerClassName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @return The bytes for policyManagerClassName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getPolicyManagerClassNameBytes() {
        java.lang.Object ref = policyManagerClassName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          policyManagerClassName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @param value The policyManagerClassName to set.
       * @return This builder for chaining.
       */
      public Builder setPolicyManagerClassName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        policyManagerClassName_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearPolicyManagerClassName() {
        policyManagerClassName_ = getDefaultInstance().getPolicyManagerClassName();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string policyManagerClassName = 5;</code>
       * @param value The bytes for policyManagerClassName to set.
       * @return This builder for chaining.
       */
      public Builder setPolicyManagerClassNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        policyManagerClassName_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FederationQueueWeightProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FederationQueueWeightProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FederationQueueWeightProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FederationQueueWeightProto>() {
      @java.lang.Override
      public FederationQueueWeightProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
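    // PARSER remains public for legacy call sites but is deprecated; new
    // code should obtain the parser via parser() or getParserForType().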

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FederationQueueWeightProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FederationQueueWeightProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.FederationQueueWeightProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
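
  /*
   * A minimal usage sketch for the generated FederationQueueWeightProto API
   * above. The queue name and weight strings are hypothetical placeholders,
   * and toByteArray()/parseFrom(byte[]) are the standard generated-message
   * helpers, assumed here rather than shown in this excerpt:
   *
   *   YarnProtos.FederationQueueWeightProto weight =
   *       YarnProtos.FederationQueueWeightProto.newBuilder()
   *           .setQueue("root.default")              // hypothetical queue name
   *           .setRouterWeight("SC-1:0.7,SC-2:0.3")  // hypothetical weights
   *           .setAmrmWeight("SC-1:0.6,SC-2:0.4")
   *           .setHeadRoomAlpha("1.0")
   *           .build();
   *
   *   byte[] bytes = weight.toByteArray();
   *   YarnProtos.FederationQueueWeightProto parsed =
   *       YarnProtos.FederationQueueWeightProto.parseFrom(bytes);
   */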

  public interface FederationSubClusterProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FederationSubClusterProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string subClusterId = 1;</code>
     * @return Whether the subClusterId field is set.
     */
    boolean hasSubClusterId();
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The subClusterId.
     */
    java.lang.String getSubClusterId();
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The bytes for subClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes();

    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return Whether the lastHeartBeatTime field is set.
     */
    boolean hasLastHeartBeatTime();
    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return The lastHeartBeatTime.
     */
    java.lang.String getLastHeartBeatTime();
    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return The bytes for lastHeartBeatTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLastHeartBeatTimeBytes();

    /**
     * <code>optional string subClusterState = 3;</code>
     * @return Whether the subClusterState field is set.
     */
    boolean hasSubClusterState();
    /**
     * <code>optional string subClusterState = 3;</code>
     * @return The subClusterState.
     */
    java.lang.String getSubClusterState();
    /**
     * <code>optional string subClusterState = 3;</code>
     * @return The bytes for subClusterState.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterStateBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FederationSubClusterProto}
   */
  public static final class FederationSubClusterProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FederationSubClusterProto)
      FederationSubClusterProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FederationSubClusterProto.newBuilder() to construct.
    private FederationSubClusterProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FederationSubClusterProto() {
      subClusterId_ = "";
      lastHeartBeatTime_ = "";
      subClusterState_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FederationSubClusterProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.Builder.class);
    }

    private int bitField0_;
    public static final int SUBCLUSTERID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterId_ = "";
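    // volatile so the lazily decoded String cached by getSubClusterId() is
    // published safely to other threads reading this message.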
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return Whether the subClusterId field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The subClusterId.
     */
    @java.lang.Override
    public java.lang.String getSubClusterId() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterId = 1;</code>
     * @return The bytes for subClusterId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LASTHEARTBEATTIME_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object lastHeartBeatTime_ = "";
    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return Whether the lastHeartBeatTime field is set.
     */
    @java.lang.Override
    public boolean hasLastHeartBeatTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return The lastHeartBeatTime.
     */
    @java.lang.Override
    public java.lang.String getLastHeartBeatTime() {
      java.lang.Object ref = lastHeartBeatTime_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          lastHeartBeatTime_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string lastHeartBeatTime = 2;</code>
     * @return The bytes for lastHeartBeatTime.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLastHeartBeatTimeBytes() {
      java.lang.Object ref = lastHeartBeatTime_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        lastHeartBeatTime_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SUBCLUSTERSTATE_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterState_ = "";
    /**
     * <code>optional string subClusterState = 3;</code>
     * @return Whether the subClusterState field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterState() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string subClusterState = 3;</code>
     * @return The subClusterState.
     */
    @java.lang.Override
    public java.lang.String getSubClusterState() {
      java.lang.Object ref = subClusterState_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterState_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterState = 3;</code>
     * @return The bytes for subClusterState.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterStateBytes() {
      java.lang.Object ref = subClusterState_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterState_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
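    // -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.
    // With no required fields this message always memoizes 1 on first use.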
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, subClusterId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, lastHeartBeatTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subClusterState_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
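      // The computed size is cached in the inherited memoizedSize field;
      // -1 marks it as not yet computed.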
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, subClusterId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, lastHeartBeatTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subClusterState_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto other = (org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto) obj;

      if (hasSubClusterId() != other.hasSubClusterId()) return false;
      if (hasSubClusterId()) {
        if (!getSubClusterId()
            .equals(other.getSubClusterId())) return false;
      }
      if (hasLastHeartBeatTime() != other.hasLastHeartBeatTime()) return false;
      if (hasLastHeartBeatTime()) {
        if (!getLastHeartBeatTime()
            .equals(other.getLastHeartBeatTime())) return false;
      }
      if (hasSubClusterState() != other.hasSubClusterState()) return false;
      if (hasSubClusterState()) {
        if (!getSubClusterState()
            .equals(other.getSubClusterState())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
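      // Generated hash scheme: seed with the descriptor, then fold in each
      // present field as (37 * hash + field number) followed by
      // (53 * hash + value hash), and finally the unknown fields.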
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSubClusterId()) {
        hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterId().hashCode();
      }
      if (hasLastHeartBeatTime()) {
        hash = (37 * hash) + LASTHEARTBEATTIME_FIELD_NUMBER;
        hash = (53 * hash) + getLastHeartBeatTime().hashCode();
      }
      if (hasSubClusterState()) {
        hash = (37 * hash) + SUBCLUSTERSTATE_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterState().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
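    // toBuilder() skips the mergeFrom copy when called on the default
    // instance, since there is nothing to carry over; newBuilder(prototype)
    // above goes through the same path.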

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FederationSubClusterProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FederationSubClusterProto)
        org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.class, org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        subClusterId_ = "";
        lastHeartBeatTime_ = "";
        subClusterState_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_FederationSubClusterProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result = new org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.subClusterId_ = subClusterId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.lastHeartBeatTime_ = lastHeartBeatTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.subClusterState_ = subClusterState_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto.getDefaultInstance()) return this;
        if (other.hasSubClusterId()) {
          subClusterId_ = other.subClusterId_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasLastHeartBeatTime()) {
          lastHeartBeatTime_ = other.lastHeartBeatTime_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasSubClusterState()) {
          subClusterState_ = other.subClusterState_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
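            // readTag() returns 0 at end of input; case 0 below ends the loop.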
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                subClusterId_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                lastHeartBeatTime_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                subClusterState_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object subClusterId_ = "";
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return Whether the subClusterId field is set.
       */
      public boolean hasSubClusterId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return The subClusterId.
       */
      public java.lang.String getSubClusterId() {
        java.lang.Object ref = subClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return The bytes for subClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterIdBytes() {
        java.lang.Object ref = subClusterId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @param value The subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterId() {
        subClusterId_ = getDefaultInstance().getSubClusterId();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 1;</code>
       * @param value The bytes for subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object lastHeartBeatTime_ = "";
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @return Whether the lastHeartBeatTime field is set.
       */
      public boolean hasLastHeartBeatTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @return The lastHeartBeatTime.
       */
      public java.lang.String getLastHeartBeatTime() {
        java.lang.Object ref = lastHeartBeatTime_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            lastHeartBeatTime_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @return The bytes for lastHeartBeatTime.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLastHeartBeatTimeBytes() {
        java.lang.Object ref = lastHeartBeatTime_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          lastHeartBeatTime_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @param value The lastHeartBeatTime to set.
       * @return This builder for chaining.
       */
      public Builder setLastHeartBeatTime(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        lastHeartBeatTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearLastHeartBeatTime() {
        lastHeartBeatTime_ = getDefaultInstance().getLastHeartBeatTime();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string lastHeartBeatTime = 2;</code>
       * @param value The bytes for lastHeartBeatTime to set.
       * @return This builder for chaining.
       */
      public Builder setLastHeartBeatTimeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        lastHeartBeatTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object subClusterState_ = "";
      /**
       * <code>optional string subClusterState = 3;</code>
       * @return Whether the subClusterState field is set.
       */
      public boolean hasSubClusterState() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string subClusterState = 3;</code>
       * @return The subClusterState.
       */
      public java.lang.String getSubClusterState() {
        java.lang.Object ref = subClusterState_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterState_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterState = 3;</code>
       * @return The bytes for subClusterState.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterStateBytes() {
        java.lang.Object ref = subClusterState_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterState_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterState = 3;</code>
       * @param value The subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterState(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterState = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterState() {
        subClusterState_ = getDefaultInstance().getSubClusterState();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterState = 3;</code>
       * @param value The bytes for subClusterState to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterStateBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterState_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FederationSubClusterProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FederationSubClusterProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FederationSubClusterProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FederationSubClusterProto>() {
      @java.lang.Override
      public FederationSubClusterProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FederationSubClusterProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FederationSubClusterProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.FederationSubClusterProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
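
  /*
   * A minimal round-trip sketch for FederationSubClusterProto using the
   * delimited helpers declared above (field values are illustrative only;
   * writeDelimitedTo comes from the MessageLite surface and is assumed
   * rather than shown in this excerpt):
   *
   *   YarnProtos.FederationSubClusterProto subCluster =
   *       YarnProtos.FederationSubClusterProto.newBuilder()
   *           .setSubClusterId("SC-1")                    // hypothetical id
   *           .setLastHeartBeatTime("2024-01-01T00:00:00Z")
   *           .setSubClusterState("SC_RUNNING")
   *           .build();
   *
   *   java.io.ByteArrayOutputStream out = new java.io.ByteArrayOutputStream();
   *   subCluster.writeDelimitedTo(out);
   *   YarnProtos.FederationSubClusterProto parsed =
   *       YarnProtos.FederationSubClusterProto.parseDelimitedFrom(
   *           new java.io.ByteArrayInputStream(out.toByteArray()));
   */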

  public interface ResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional string resource_name = 2;</code>
     * @return Whether the resourceName field is set.
     */
    boolean hasResourceName();
    /**
     * <code>optional string resource_name = 2;</code>
     * @return The resourceName.
     */
    java.lang.String getResourceName();
    /**
     * <code>optional string resource_name = 2;</code>
     * @return The bytes for resourceName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceNameBytes();

    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * <code>optional int32 num_containers = 4;</code>
     * @return Whether the numContainers field is set.
     */
    boolean hasNumContainers();
    /**
     * <code>optional int32 num_containers = 4;</code>
     * @return The numContainers.
     */
    int getNumContainers();

    /**
     * <code>optional bool relax_locality = 5 [default = true];</code>
     * @return Whether the relaxLocality field is set.
     */
    boolean hasRelaxLocality();
    /**
     * <code>optional bool relax_locality = 5 [default = true];</code>
     * @return The relaxLocality.
     */
    boolean getRelaxLocality();

    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return Whether the nodeLabelExpression field is set.
     */
    boolean hasNodeLabelExpression();
    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return The nodeLabelExpression.
     */
    java.lang.String getNodeLabelExpression();
    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return The bytes for nodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelExpressionBytes();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     * @return Whether the executionTypeRequest field is set.
     */
    boolean hasExecutionTypeRequest();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     * @return The executionTypeRequest.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder();

    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return Whether the allocationRequestId field is set.
     */
    boolean hasAllocationRequestId();
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return The allocationRequestId.
     */
    long getAllocationRequestId();
  }
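  // Proto2 presence semantics: every optional field pairs a hasX() check
  // with a getX() that, when the field is unset, returns the declared
  // default (true for relax_locality, -1 for allocation_request_id, and the
  // default instance for message fields such as priority).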
  /**
   * <pre>
   *&#47;/////////////////////////////////////////////////////////////////////
   * //// From AM_RM_Protocol /////////////////////////////////////////////
   * //////////////////////////////////////////////////////////////////////
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.ResourceRequestProto}
   */
  public static final class ResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceRequestProto)
      ResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceRequestProto.newBuilder() to construct.
    private ResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceRequestProto() {
      resourceName_ = "";
      relaxLocality_ = true;
      nodeLabelExpression_ = "";
      allocationRequestId_ = -1L;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceRequestProto();
    }
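    // newInstance is invoked reflectively by the protobuf runtime when it
    // needs a fresh instance; the UnusedPrivateParameter argument exists only
    // to give this overload a unique signature.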

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder.class);
    }

    // Presence bitmap: one bit per optional field, assigned in declaration
    // order (bit 0 = priority ... bit 7 = allocation_request_id).
    private int bitField0_;
    public static final int PRIORITY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int RESOURCE_NAME_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object resourceName_ = "";
    /**
     * <code>optional string resource_name = 2;</code>
     * @return Whether the resourceName field is set.
     */
    @java.lang.Override
    public boolean hasResourceName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string resource_name = 2;</code>
     * @return The resourceName.
     */
    @java.lang.Override
    public java.lang.String getResourceName() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          resourceName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string resource_name = 2;</code>
     * @return The bytes for resourceName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getResourceNameBytes() {
      java.lang.Object ref = resourceName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        resourceName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
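    // String fields are stored as a ByteString straight off the wire and
    // decoded lazily: getResourceName() caches the decoded String back into
    // the field only when the bytes are valid UTF-8, so
    // getResourceNameBytes() can still hand back the original bytes.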

    public static final int CAPABILITY_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     * @return Whether the capability field is set.
     */
    @java.lang.Override
    public boolean hasCapability() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     * @return The capability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }

    public static final int NUM_CONTAINERS_FIELD_NUMBER = 4;
    private int numContainers_ = 0;
    /**
     * <code>optional int32 num_containers = 4;</code>
     * @return Whether the numContainers field is set.
     */
    @java.lang.Override
    public boolean hasNumContainers() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int32 num_containers = 4;</code>
     * @return The numContainers.
     */
    @java.lang.Override
    public int getNumContainers() {
      return numContainers_;
    }

    public static final int RELAX_LOCALITY_FIELD_NUMBER = 5;
    private boolean relaxLocality_ = true;
    /**
     * <code>optional bool relax_locality = 5 [default = true];</code>
     * @return Whether the relaxLocality field is set.
     */
    @java.lang.Override
    public boolean hasRelaxLocality() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional bool relax_locality = 5 [default = true];</code>
     * @return The relaxLocality.
     */
    @java.lang.Override
    public boolean getRelaxLocality() {
      return relaxLocality_;
    }

    public static final int NODE_LABEL_EXPRESSION_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private volatile java.lang.Object nodeLabelExpression_ = "";
    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return Whether the nodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasNodeLabelExpression() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return The nodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getNodeLabelExpression() {
      java.lang.Object ref = nodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string node_label_expression = 6;</code>
     * @return The bytes for nodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelExpressionBytes() {
      java.lang.Object ref = nodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int EXECUTION_TYPE_REQUEST_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionTypeRequest_;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     * @return Whether the executionTypeRequest field is set.
     */
    @java.lang.Override
    public boolean hasExecutionTypeRequest() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     * @return The executionTypeRequest.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest() {
      return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_;
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder() {
      return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_;
    }

    public static final int ALLOCATION_REQUEST_ID_FIELD_NUMBER = 8;
    private long allocationRequestId_ = -1L;
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return Whether the allocationRequestId field is set.
     */
    @java.lang.Override
    public boolean hasAllocationRequestId() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
     * @return The allocationRequestId.
     */
    @java.lang.Override
    public long getAllocationRequestId() {
      return allocationRequestId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasCapability()) {
        if (!getCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
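    // memoizedIsInitialized is a tri-state cache: -1 unknown, 0 false, 1 true.
    // protoc emits isInitialized checks only for message fields whose types
    // transitively declare required fields, which is why capability is the
    // lone field verified here.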

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getPriority());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, resourceName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getCapability());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(4, numContainers_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeBool(5, relaxLocality_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, nodeLabelExpression_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(7, getExecutionTypeRequest());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt64(8, allocationRequestId_);
      }
      getUnknownFields().writeTo(output);
    }
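    // Serialization writes the set fields in ascending field-number order,
    // each guarded by its presence bit, then appends any unknown fields so
    // that unrecognized data round-trips intact.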

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getPriority());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, resourceName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getCapability());
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, numContainers_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(5, relaxLocality_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, nodeLabelExpression_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getExecutionTypeRequest());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(8, allocationRequestId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
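    // memoizedSize (inherited, -1 means "not yet computed") caches the result
    // above; caching is safe because message instances are immutable.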

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto) obj;

      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasResourceName() != other.hasResourceName()) return false;
      if (hasResourceName()) {
        if (!getResourceName()
            .equals(other.getResourceName())) return false;
      }
      if (hasCapability() != other.hasCapability()) return false;
      if (hasCapability()) {
        if (!getCapability()
            .equals(other.getCapability())) return false;
      }
      if (hasNumContainers() != other.hasNumContainers()) return false;
      if (hasNumContainers()) {
        if (getNumContainers()
            != other.getNumContainers()) return false;
      }
      if (hasRelaxLocality() != other.hasRelaxLocality()) return false;
      if (hasRelaxLocality()) {
        if (getRelaxLocality()
            != other.getRelaxLocality()) return false;
      }
      if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false;
      if (hasNodeLabelExpression()) {
        if (!getNodeLabelExpression()
            .equals(other.getNodeLabelExpression())) return false;
      }
      if (hasExecutionTypeRequest() != other.hasExecutionTypeRequest()) return false;
      if (hasExecutionTypeRequest()) {
        if (!getExecutionTypeRequest()
            .equals(other.getExecutionTypeRequest())) return false;
      }
      if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false;
      if (hasAllocationRequestId()) {
        if (getAllocationRequestId()
            != other.getAllocationRequestId()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasResourceName()) {
        hash = (37 * hash) + RESOURCE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getResourceName().hashCode();
      }
      if (hasCapability()) {
        hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getCapability().hashCode();
      }
      if (hasNumContainers()) {
        hash = (37 * hash) + NUM_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumContainers();
      }
      if (hasRelaxLocality()) {
        hash = (37 * hash) + RELAX_LOCALITY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getRelaxLocality());
      }
      if (hasNodeLabelExpression()) {
        hash = (37 * hash) + NODE_LABEL_EXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelExpression().hashCode();
      }
      if (hasExecutionTypeRequest()) {
        hash = (37 * hash) + EXECUTION_TYPE_REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getExecutionTypeRequest().hashCode();
      }
      if (hasAllocationRequestId()) {
        hash = (37 * hash) + ALLOCATION_REQUEST_ID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocationRequestId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
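    // Hash scheme: seed 41, mix in the descriptor with 19, then for each
    // present field multiply by 37 and add the field number, multiply by 53
    // and add the value's hash. memoizedHashCode uses 0 as the "uncomputed"
    // sentinel, so a genuine hash of 0 is simply recomputed on every call.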

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
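    // All of the parseFrom overloads above delegate to PARSER; the
    // parseDelimitedFrom variants additionally read a varint length prefix,
    // letting several messages share one stream. A minimal round-trip sketch
    // (illustrative only; the values are made up):
    //
    //   ResourceRequestProto req = ResourceRequestProto.newBuilder()
    //       .setResourceName("*")          // "*" conventionally means any host
    //       .setNumContainers(2)
    //       .build();
    //   byte[] wire = req.toByteArray();
    //   ResourceRequestProto parsed = ResourceRequestProto.parseFrom(wire);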

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     *&#47;/////////////////////////////////////////////////////////////////////
     * //// From AM_RM_Protocol /////////////////////////////////////////////
     * //////////////////////////////////////////////////////////////////////
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.ResourceRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getPriorityFieldBuilder();
          getCapabilityFieldBuilder();
          getExecutionTypeRequestFieldBuilder();
        }
      }
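      // alwaysUseFieldBuilders is a static test hook in the runtime; when
      // enabled it forces the nested-message field builders above to be
      // created eagerly instead of on first use.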
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        resourceName_ = "";
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        numContainers_ = 0;
        relaxLocality_ = true;
        nodeLabelExpression_ = "";
        executionTypeRequest_ = null;
        if (executionTypeRequestBuilder_ != null) {
          executionTypeRequestBuilder_.dispose();
          executionTypeRequestBuilder_ = null;
        }
        allocationRequestId_ = -1L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resourceName_ = resourceName_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.capability_ = capabilityBuilder_ == null
              ? capability_
              : capabilityBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.numContainers_ = numContainers_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.relaxLocality_ = relaxLocality_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.nodeLabelExpression_ = nodeLabelExpression_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.executionTypeRequest_ = executionTypeRequestBuilder_ == null
              ? executionTypeRequest_
              : executionTypeRequestBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.allocationRequestId_ = allocationRequestId_;
          to_bitField0_ |= 0x00000080;
        }
        result.bitField0_ |= to_bitField0_;
      }
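      // buildPartial0 copies only the fields whose builder bit is set,
      // collapsing any live SingleFieldBuilder into an immutable message via
      // build(), and translates the builder's presence bits into the result's
      // bitField0_.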

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()) return this;
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasResourceName()) {
          resourceName_ = other.resourceName_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasCapability()) {
          mergeCapability(other.getCapability());
        }
        if (other.hasNumContainers()) {
          setNumContainers(other.getNumContainers());
        }
        if (other.hasRelaxLocality()) {
          setRelaxLocality(other.getRelaxLocality());
        }
        if (other.hasNodeLabelExpression()) {
          nodeLabelExpression_ = other.nodeLabelExpression_;
          bitField0_ |= 0x00000020;
          onChanged();
        }
        if (other.hasExecutionTypeRequest()) {
          mergeExecutionTypeRequest(other.getExecutionTypeRequest());
        }
        if (other.hasAllocationRequestId()) {
          setAllocationRequestId(other.getAllocationRequestId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasCapability()) {
          if (!getCapability().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                resourceName_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                input.readMessage(
                    getCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                numContainers_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 40: {
                relaxLocality_ = input.readBool();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              case 50: {
                nodeLabelExpression_ = input.readBytes();
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              case 58: {
                input.readMessage(
                    getExecutionTypeRequestFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 64: {
                allocationRequestId_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
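      // Wire-format dispatch: tag = (field_number << 3) | wire_type, so
      // case 10 is field 1 as a length-delimited message, case 32 is field 4
      // as a varint, and case 64 is field 8 as a varint; readTag() returns 0
      // at end of input. The default branch stashes unrecognized fields as
      // unknown fields, and parseUnknownField returning false signals an
      // end-group tag, which also terminates the loop.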
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000001);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }
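      // The SingleFieldBuilderV3 is created lazily; once it exists, the plain
      // priority_ reference is nulled and the builder becomes the single
      // source of truth, propagating changes to this parent builder through
      // getParentForChildren()/isClean().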

      private java.lang.Object resourceName_ = "";
      /**
       * <code>optional string resource_name = 2;</code>
       * @return Whether the resourceName field is set.
       */
      public boolean hasResourceName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string resource_name = 2;</code>
       * @return The resourceName.
       */
      public java.lang.String getResourceName() {
        java.lang.Object ref = resourceName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            resourceName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string resource_name = 2;</code>
       * @return The bytes for resourceName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getResourceNameBytes() {
        java.lang.Object ref = resourceName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          resourceName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string resource_name = 2;</code>
       * @param value The resourceName to set.
       * @return This builder for chaining.
       */
      public Builder setResourceName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        resourceName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string resource_name = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearResourceName() {
        resourceName_ = getDefaultInstance().getResourceName();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string resource_name = 2;</code>
       * @param value The bytes for resourceName to set.
       * @return This builder for chaining.
       */
      public Builder setResourceNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        resourceName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
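      // Note that setResourceNameBytes stores the ByteString verbatim with no
      // UTF-8 validation (proto2 semantics); decoding is deferred to
      // getResourceName().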

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       * @return Whether the capability field is set.
       */
      public boolean hasCapability() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       * @return The capability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
        if (capabilityBuilder_ == null) {
          return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        } else {
          return capabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          capability_ = value;
        } else {
          capabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public Builder setCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (capabilityBuilder_ == null) {
          capability_ = builderForValue.build();
        } else {
          capabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            capability_ != null &&
            capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getCapabilityBuilder().mergeFrom(value);
          } else {
            capability_ = value;
          }
        } else {
          capabilityBuilder_.mergeFrom(value);
        }
        if (capability_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public Builder clearCapability() {
        bitField0_ = (bitField0_ & ~0x00000004);
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
        if (capabilityBuilder_ != null) {
          return capabilityBuilder_.getMessageOrBuilder();
        } else {
          return capability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getCapabilityFieldBuilder() {
        if (capabilityBuilder_ == null) {
          capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getCapability(),
                  getParentForChildren(),
                  isClean());
          capability_ = null;
        }
        return capabilityBuilder_;
      }

      private int numContainers_ ;
      /**
       * <code>optional int32 num_containers = 4;</code>
       * @return Whether the numContainers field is set.
       */
      @java.lang.Override
      public boolean hasNumContainers() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 num_containers = 4;</code>
       * @return The numContainers.
       */
      @java.lang.Override
      public int getNumContainers() {
        return numContainers_;
      }
      /**
       * <code>optional int32 num_containers = 4;</code>
       * @param value The numContainers to set.
       * @return This builder for chaining.
       */
      public Builder setNumContainers(int value) {
        numContainers_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_containers = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumContainers() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numContainers_ = 0;
        onChanged();
        return this;
      }

      private boolean relaxLocality_ = true;
      /**
       * <code>optional bool relax_locality = 5 [default = true];</code>
       * @return Whether the relaxLocality field is set.
       */
      @java.lang.Override
      public boolean hasRelaxLocality() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional bool relax_locality = 5 [default = true];</code>
       * @return The relaxLocality.
       */
      @java.lang.Override
      public boolean getRelaxLocality() {
        return relaxLocality_;
      }
      /**
       * <code>optional bool relax_locality = 5 [default = true];</code>
       * @param value The relaxLocality to set.
       * @return This builder for chaining.
       */
      public Builder setRelaxLocality(boolean value) {
        relaxLocality_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool relax_locality = 5 [default = true];</code>
       * @return This builder for chaining.
       */
      public Builder clearRelaxLocality() {
        bitField0_ = (bitField0_ & ~0x00000010);
        relaxLocality_ = true;
        onChanged();
        return this;
      }
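      // Clearing restores the proto-declared default (true), not Java's
      // false: afterwards hasRelaxLocality() reports unset while
      // getRelaxLocality() still returns true.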

      private java.lang.Object nodeLabelExpression_ = "";
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @return Whether the nodeLabelExpression field is set.
       */
      public boolean hasNodeLabelExpression() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @return The nodeLabelExpression.
       */
      public java.lang.String getNodeLabelExpression() {
        java.lang.Object ref = nodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @return The bytes for nodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelExpressionBytes() {
        java.lang.Object ref = nodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @param value The nodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabelExpression_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabelExpression() {
        nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x00000020);
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_label_expression = 6;</code>
       * @param value The bytes for nodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabelExpression_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionTypeRequest_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> executionTypeRequestBuilder_;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       * @return Whether the executionTypeRequest field is set.
       */
      public boolean hasExecutionTypeRequest() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       * @return The executionTypeRequest.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionTypeRequest() {
        if (executionTypeRequestBuilder_ == null) {
          return executionTypeRequest_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_;
        } else {
          return executionTypeRequestBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public Builder setExecutionTypeRequest(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) {
        if (executionTypeRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          executionTypeRequest_ = value;
        } else {
          executionTypeRequestBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public Builder setExecutionTypeRequest(
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder builderForValue) {
        if (executionTypeRequestBuilder_ == null) {
          executionTypeRequest_ = builderForValue.build();
        } else {
          executionTypeRequestBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public Builder mergeExecutionTypeRequest(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) {
        if (executionTypeRequestBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            executionTypeRequest_ != null &&
            executionTypeRequest_ != org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) {
            getExecutionTypeRequestBuilder().mergeFrom(value);
          } else {
            executionTypeRequest_ = value;
          }
        } else {
          executionTypeRequestBuilder_.mergeFrom(value);
        }
        if (executionTypeRequest_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public Builder clearExecutionTypeRequest() {
        bitField0_ = (bitField0_ & ~0x00000040);
        executionTypeRequest_ = null;
        if (executionTypeRequestBuilder_ != null) {
          executionTypeRequestBuilder_.dispose();
          executionTypeRequestBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder getExecutionTypeRequestBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getExecutionTypeRequestFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeRequestOrBuilder() {
        if (executionTypeRequestBuilder_ != null) {
          return executionTypeRequestBuilder_.getMessageOrBuilder();
        } else {
          return executionTypeRequest_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionTypeRequest_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto execution_type_request = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> 
          getExecutionTypeRequestFieldBuilder() {
        if (executionTypeRequestBuilder_ == null) {
          executionTypeRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder>(
                  getExecutionTypeRequest(),
                  getParentForChildren(),
                  isClean());
          executionTypeRequest_ = null;
        }
        return executionTypeRequestBuilder_;
      }
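
      // The SingleFieldBuilderV3 above is created lazily: until a nested
      // builder is requested, the field is held as a plain message in
      // executionTypeRequest_; once the field builder exists it becomes the
      // single source of truth and the plain reference is nulled out.
      //
      // Set vs. merge on this field (illustrative sketch):
      //
      //   builder.setExecutionTypeRequest(a);    // replaces the whole message
      //   builder.mergeExecutionTypeRequest(b);  // merges b into any value
      //                                          // already set, field by field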

      private long allocationRequestId_ = -1L;
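      // The -1L initializer mirrors the [default = -1] option on the field,
      // which is why clearAllocationRequestId() below resets to -1L, not 0.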
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return Whether the allocationRequestId field is set.
       */
      @java.lang.Override
      public boolean hasAllocationRequestId() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return The allocationRequestId.
       */
      @java.lang.Override
      public long getAllocationRequestId() {
        return allocationRequestId_;
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @param value The allocationRequestId to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationRequestId(long value) {
        allocationRequestId_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocation_request_id = 8 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationRequestId() {
        bitField0_ = (bitField0_ & ~0x00000080);
        allocationRequestId_ = -1L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceRequestProto>() {
      @java.lang.Override
      public ResourceRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
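
  /*
   * Illustrative round-trip for ResourceRequestProto (a sketch, not generated
   * code; field values are made up and all fields shown are optional):
   *
   *   ResourceRequestProto request = ResourceRequestProto.newBuilder()
   *       .setAllocationRequestId(42L)
   *       .setExecutionTypeRequest(ExecutionTypeRequestProto.newBuilder())
   *       .setNodeLabelExpression("gpu")
   *       .build();
   *   ResourceRequestProto parsed =
   *       ResourceRequestProto.parseFrom(request.toByteArray());
   */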

  public interface ExecutionTypeRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ExecutionTypeRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();

    /**
     * <code>optional bool enforce_execution_type = 2 [default = false];</code>
     * @return Whether the enforceExecutionType field is set.
     */
    boolean hasEnforceExecutionType();
    /**
     * <code>optional bool enforce_execution_type = 2 [default = false];</code>
     * @return The enforceExecutionType.
     */
    boolean getEnforceExecutionType();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ExecutionTypeRequestProto}
   */
  public static final class ExecutionTypeRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ExecutionTypeRequestProto)
      ExecutionTypeRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ExecutionTypeRequestProto.newBuilder() to construct.
    private ExecutionTypeRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ExecutionTypeRequestProto() {
      executionType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ExecutionTypeRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int EXECUTION_TYPE_FIELD_NUMBER = 1;
    private int executionType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
    }

    public static final int ENFORCE_EXECUTION_TYPE_FIELD_NUMBER = 2;
    private boolean enforceExecutionType_ = false;
    /**
     * <code>optional bool enforce_execution_type = 2 [default = false];</code>
     * @return Whether the enforceExecutionType field is set.
     */
    @java.lang.Override
    public boolean hasEnforceExecutionType() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional bool enforce_execution_type = 2 [default = false];</code>
     * @return The enforceExecutionType.
     */
    @java.lang.Override
    public boolean getEnforceExecutionType() {
      return enforceExecutionType_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, executionType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBool(2, enforceExecutionType_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, executionType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(2, enforceExecutionType_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto) obj;

      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (executionType_ != other.executionType_) return false;
      }
      if (hasEnforceExecutionType() != other.hasEnforceExecutionType()) return false;
      if (hasEnforceExecutionType()) {
        if (getEnforceExecutionType()
            != other.getEnforceExecutionType()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + executionType_;
      }
      if (hasEnforceExecutionType()) {
        hash = (37 * hash) + ENFORCE_EXECUTION_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getEnforceExecutionType());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ExecutionTypeRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ExecutionTypeRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        executionType_ = 1;
        enforceExecutionType_ = false;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.executionType_ = executionType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.enforceExecutionType_ = enforceExecutionType_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
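
      // buildPartial0 copies only fields whose has-bit is set and ORs the
      // accumulated bits into result.bitField0_, so unset fields keep the
      // message defaults (GUARANTEED / false) without being marked present.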

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) return this;
        if (other.hasExecutionType()) {
          setExecutionType(other.getExecutionType());
        }
        if (other.hasEnforceExecutionType()) {
          setEnforceExecutionType(other.getEnforceExecutionType());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

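      // The parse loop below dispatches on the raw wire tag, which packs
      // (field_number << 3) | wire_type: case 8 is field 1 as a varint (the
      // enum) and case 16 is field 2 as a varint (the bool). Tag 0 means end
      // of input, and an unrecognized enum number is kept as an unknown
      // varint field rather than dropped.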
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  executionType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 16: {
                enforceExecutionType_ = input.readBool();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int executionType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto execution_type = 1 [default = GUARANTEED];</code>
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        executionType_ = 1;
        onChanged();
        return this;
      }

      private boolean enforceExecutionType_;
      /**
       * <code>optional bool enforce_execution_type = 2 [default = false];</code>
       * @return Whether the enforceExecutionType field is set.
       */
      @java.lang.Override
      public boolean hasEnforceExecutionType() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bool enforce_execution_type = 2 [default = false];</code>
       * @return The enforceExecutionType.
       */
      @java.lang.Override
      public boolean getEnforceExecutionType() {
        return enforceExecutionType_;
      }
      /**
       * <code>optional bool enforce_execution_type = 2 [default = false];</code>
       * @param value The enforceExecutionType to set.
       * @return This builder for chaining.
       */
      public Builder setEnforceExecutionType(boolean value) {
        enforceExecutionType_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool enforce_execution_type = 2 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearEnforceExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000002);
        enforceExecutionType_ = false;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ExecutionTypeRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ExecutionTypeRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ExecutionTypeRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ExecutionTypeRequestProto>() {
      @java.lang.Override
      public ExecutionTypeRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ExecutionTypeRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ExecutionTypeRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
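
  /*
   * Illustrative round-trip for ExecutionTypeRequestProto (a sketch; assumes
   * ExecutionTypeProto.OPPORTUNISTIC as declared in yarn_protos.proto):
   *
   *   ExecutionTypeRequestProto req = ExecutionTypeRequestProto.newBuilder()
   *       .setExecutionType(ExecutionTypeProto.OPPORTUNISTIC)
   *       .setEnforceExecutionType(true)
   *       .build();
   *   ExecutionTypeRequestProto parsed =
   *       ExecutionTypeRequestProto.parseFrom(req.toByteArray());
   *   assert parsed.getEnforceExecutionType();
   */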

  public interface SchedulingRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SchedulingRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
     * @return Whether the allocationRequestId field is set.
     */
    boolean hasAllocationRequestId();
    /**
     * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
     * @return The allocationRequestId.
     */
    long getAllocationRequestId();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder();

    /**
     * <code>repeated string allocationTags = 4;</code>
     * @return A list containing the allocationTags.
     */
    java.util.List<java.lang.String>
        getAllocationTagsList();
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @return The count of allocationTags.
     */
    int getAllocationTagsCount();
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    java.lang.String getAllocationTags(int index);
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index);

    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     * @return Whether the resourceSizing field is set.
     */
    boolean hasResourceSizing();
    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     * @return The resourceSizing.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing();
    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder();

    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     * @return Whether the placementConstraint field is set.
     */
    boolean hasPlacementConstraint();
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     * @return The placementConstraint.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint();
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder();
  }
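
  /*
   * Illustrative construction of a SchedulingRequestProto (a sketch; assumes
   * the standard generated builder methods for the fields declared in the
   * interface above, e.g. addAllocationTags for the repeated string field):
   *
   *   SchedulingRequestProto sched = SchedulingRequestProto.newBuilder()
   *       .setAllocationRequestId(7L)
   *       .addAllocationTags("mapper")
   *       .build();
   */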
  /**
   * Protobuf type {@code hadoop.yarn.SchedulingRequestProto}
   */
  public static final class SchedulingRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SchedulingRequestProto)
      SchedulingRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SchedulingRequestProto.newBuilder() to construct.
    private SchedulingRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SchedulingRequestProto() {
      allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SchedulingRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int ALLOCATIONREQUESTID_FIELD_NUMBER = 1;
    private long allocationRequestId_ = 0L;
    /**
     * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
     * @return Whether the allocationRequestId field is set.
     */
    @java.lang.Override
    public boolean hasAllocationRequestId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
     * @return The allocationRequestId.
     */
    @java.lang.Override
    public long getAllocationRequestId() {
      return allocationRequestId_;
    }

    public static final int PRIORITY_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int EXECUTIONTYPE_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionType_;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override
    public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     * @return The executionType.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType() {
      return executionType_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_;
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder() {
      return executionType_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_;
    }

    public static final int ALLOCATIONTAGS_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @return A list containing the allocationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getAllocationTagsList() {
      return allocationTags_;
    }
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @return The count of allocationTags.
     */
    public int getAllocationTagsCount() {
      return allocationTags_.size();
    }
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    public java.lang.String getAllocationTags(int index) {
      return allocationTags_.get(index);
    }
    /**
     * <code>repeated string allocationTags = 4;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index) {
      return allocationTags_.getByteString(index);
    }

    public static final int RESOURCESIZING_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto resourceSizing_;
    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     * @return Whether the resourceSizing field is set.
     */
    @java.lang.Override
    public boolean hasResourceSizing() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     * @return The resourceSizing.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing() {
      return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder() {
      return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_;
    }

    public static final int PLACEMENTCONSTRAINT_FIELD_NUMBER = 6;
    private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     * @return Whether the placementConstraint field is set.
     */
    @java.lang.Override
    public boolean hasPlacementConstraint() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     * @return The placementConstraint.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }

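    // isInitialized() memoizes its result in memoizedIsInitialized (-1
    // unknown, 0 false, 1 true) and recurses only into nested message fields
    // whose types declare required fields, directly or transitively; here
    // that is resourceSizing and placementConstraint.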
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResourceSizing()) {
        if (!getResourceSizing().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasPlacementConstraint()) {
        if (!getPlacementConstraint().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(1, allocationRequestId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getPriority());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getExecutionType());
      }
      for (int i = 0; i < allocationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, allocationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(5, getResourceSizing());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(6, getPlacementConstraint());
      }
      getUnknownFields().writeTo(output);
    }

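    // getSerializedSize() memoizes into memoizedSize. For the repeated
    // allocationTags field the cost is each string's length-delimited
    // payload plus one tag byte per element (field 4, wire type 2, encodes
    // as the single byte 0x22), which is the "1 * size" term below.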
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(1, allocationRequestId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getPriority());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getExecutionType());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < allocationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getAllocationTagsList().size();
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getResourceSizing());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, getPlacementConstraint());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto) obj;

      if (hasAllocationRequestId() != other.hasAllocationRequestId()) return false;
      if (hasAllocationRequestId()) {
        if (getAllocationRequestId()
            != other.getAllocationRequestId()) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (!getExecutionType()
            .equals(other.getExecutionType())) return false;
      }
      if (!getAllocationTagsList()
          .equals(other.getAllocationTagsList())) return false;
      if (hasResourceSizing() != other.hasResourceSizing()) return false;
      if (hasResourceSizing()) {
        if (!getResourceSizing()
            .equals(other.getResourceSizing())) return false;
      }
      if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false;
      if (hasPlacementConstraint()) {
        if (!getPlacementConstraint()
            .equals(other.getPlacementConstraint())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAllocationRequestId()) {
        hash = (37 * hash) + ALLOCATIONREQUESTID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocationRequestId());
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER;
        hash = (53 * hash) + getExecutionType().hashCode();
      }
      if (getAllocationTagsCount() > 0) {
        hash = (37 * hash) + ALLOCATIONTAGS_FIELD_NUMBER;
        hash = (53 * hash) + getAllocationTagsList().hashCode();
      }
      if (hasResourceSizing()) {
        hash = (37 * hash) + RESOURCESIZING_FIELD_NUMBER;
        hash = (53 * hash) + getResourceSizing().hashCode();
      }
      if (hasPlacementConstraint()) {
        hash = (37 * hash) + PLACEMENTCONSTRAINT_FIELD_NUMBER;
        hash = (53 * hash) + getPlacementConstraint().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SchedulingRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SchedulingRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getPriorityFieldBuilder();
          getExecutionTypeFieldBuilder();
          getResourceSizingFieldBuilder();
          getPlacementConstraintFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        allocationRequestId_ = 0L;
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        executionType_ = null;
        if (executionTypeBuilder_ != null) {
          executionTypeBuilder_.dispose();
          executionTypeBuilder_ = null;
        }
        allocationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        resourceSizing_ = null;
        if (resourceSizingBuilder_ != null) {
          resourceSizingBuilder_.dispose();
          resourceSizingBuilder_ = null;
        }
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SchedulingRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.allocationRequestId_ = allocationRequestId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.executionType_ = executionTypeBuilder_ == null
              ? executionType_
              : executionTypeBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          allocationTags_.makeImmutable();
          result.allocationTags_ = allocationTags_;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.resourceSizing_ = resourceSizingBuilder_ == null
              ? resourceSizing_
              : resourceSizingBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.placementConstraint_ = placementConstraintBuilder_ == null
              ? placementConstraint_
              : placementConstraintBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }
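
      // Note on the bit remapping above: allocationTags occupies builder bit
      // 0x00000008 but, being a repeated field, has no presence bit in the
      // built message, so resourceSizing maps from builder bit 0x00000010 to
      // message bit 0x00000008 and placementConstraint from 0x00000020 to
      // 0x00000010.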

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()) return this;
        if (other.hasAllocationRequestId()) {
          setAllocationRequestId(other.getAllocationRequestId());
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasExecutionType()) {
          mergeExecutionType(other.getExecutionType());
        }
        if (!other.allocationTags_.isEmpty()) {
          if (allocationTags_.isEmpty()) {
            allocationTags_ = other.allocationTags_;
            bitField0_ |= 0x00000008;
          } else {
            ensureAllocationTagsIsMutable();
            allocationTags_.addAll(other.allocationTags_);
          }
          onChanged();
        }
        if (other.hasResourceSizing()) {
          mergeResourceSizing(other.getResourceSizing());
        }
        if (other.hasPlacementConstraint()) {
          mergePlacementConstraint(other.getPlacementConstraint());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResourceSizing()) {
          if (!getResourceSizing().isInitialized()) {
            return false;
          }
        }
        if (hasPlacementConstraint()) {
          if (!getPlacementConstraint().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                allocationRequestId_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                input.readMessage(
                    getExecutionTypeFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureAllocationTagsIsMutable();
                allocationTags_.add(bs);
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getResourceSizingFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                input.readMessage(
                    getPlacementConstraintFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
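
      // Wire-format note: each case above matches a protobuf tag computed as
      // (field_number << 3) | wire_type. allocationRequestId is field 1 with
      // varint wire type 0 (tag 8); priority is field 2 with length-delimited
      // wire type 2 (tag 18); and so on through placementConstraint (tag 50).
      // A tag of 0 means end of input.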
      private int bitField0_;

      private long allocationRequestId_ ;
      /**
       * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
       * @return Whether the allocationRequestId field is set.
       */
      @java.lang.Override
      public boolean hasAllocationRequestId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
       * @return The allocationRequestId.
       */
      @java.lang.Override
      public long getAllocationRequestId() {
        return allocationRequestId_;
      }
      /**
       * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
       * @param value The allocationRequestId to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationRequestId(long value) {
        allocationRequestId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocationRequestId = 1 [default = 0];</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationRequestId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        allocationRequestId_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000002);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }
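
      // The lazy SingleFieldBuilderV3 pattern above is shared by every
      // message-typed field in this builder: until a field builder is forced
      // (via getPriorityBuilder() or parsing), the value is held directly in
      // priority_; afterwards ownership moves to priorityBuilder_ and
      // priority_ is nulled, so reads must go through the accessors.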

      private org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto executionType_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> executionTypeBuilder_;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       * @return Whether the executionType field is set.
       */
      public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       * @return The executionType.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto getExecutionType() {
        if (executionTypeBuilder_ == null) {
          return executionType_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_;
        } else {
          return executionTypeBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) {
        if (executionTypeBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          executionType_ = value;
        } else {
          executionTypeBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public Builder setExecutionType(
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder builderForValue) {
        if (executionTypeBuilder_ == null) {
          executionType_ = builderForValue.build();
        } else {
          executionTypeBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public Builder mergeExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto value) {
        if (executionTypeBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            executionType_ != null &&
            executionType_ != org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance()) {
            getExecutionTypeBuilder().mergeFrom(value);
          } else {
            executionType_ = value;
          }
        } else {
          executionTypeBuilder_.mergeFrom(value);
        }
        if (executionType_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000004);
        executionType_ = null;
        if (executionTypeBuilder_ != null) {
          executionTypeBuilder_.dispose();
          executionTypeBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder getExecutionTypeBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getExecutionTypeFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder getExecutionTypeOrBuilder() {
        if (executionTypeBuilder_ != null) {
          return executionTypeBuilder_.getMessageOrBuilder();
        } else {
          return executionType_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.getDefaultInstance() : executionType_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeRequestProto executionType = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder> 
          getExecutionTypeFieldBuilder() {
        if (executionTypeBuilder_ == null) {
          executionTypeBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProtoOrBuilder>(
                  getExecutionType(),
                  getParentForChildren(),
                  isClean());
          executionType_ = null;
        }
        return executionTypeBuilder_;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureAllocationTagsIsMutable() {
        if (!allocationTags_.isModifiable()) {
          allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
        }
        bitField0_ |= 0x00000008;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @return A list containing the allocationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getAllocationTagsList() {
        allocationTags_.makeImmutable();
        return allocationTags_;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @return The count of allocationTags.
       */
      public int getAllocationTagsCount() {
        return allocationTags_.size();
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param index The index of the element to return.
       * @return The allocationTags at the given index.
       */
      public java.lang.String getAllocationTags(int index) {
        return allocationTags_.get(index);
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param index The index of the value to return.
       * @return The bytes of the allocationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAllocationTagsBytes(int index) {
        return allocationTags_.getByteString(index);
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param index The index to set the value at.
       * @param value The allocationTags to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.set(index, value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param value The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param values The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllAllocationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureAllocationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, allocationTags_);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationTags() {
        allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocationTags = 4;</code>
       * @param value The bytes of the allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
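
      // Usage sketch for the repeated-string accessors above (tag values are
      // illustrative only):
      //
      //   Builder b = org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.newBuilder()
      //       .addAllocationTags("hbase")
      //       .addAllAllocationTags(java.util.Arrays.asList("region-server", "memory-heavy"));
      //
      // getAllocationTagsList() freezes the backing list, so a later mutation
      // re-copies it through ensureAllocationTagsIsMutable().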

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto resourceSizing_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder> resourceSizingBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       * @return Whether the resourceSizing field is set.
       */
      public boolean hasResourceSizing() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       * @return The resourceSizing.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getResourceSizing() {
        if (resourceSizingBuilder_ == null) {
          return resourceSizing_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_;
        } else {
          return resourceSizingBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public Builder setResourceSizing(org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto value) {
        if (resourceSizingBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resourceSizing_ = value;
        } else {
          resourceSizingBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public Builder setResourceSizing(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder builderForValue) {
        if (resourceSizingBuilder_ == null) {
          resourceSizing_ = builderForValue.build();
        } else {
          resourceSizingBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public Builder mergeResourceSizing(org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto value) {
        if (resourceSizingBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            resourceSizing_ != null &&
            resourceSizing_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance()) {
            getResourceSizingBuilder().mergeFrom(value);
          } else {
            resourceSizing_ = value;
          }
        } else {
          resourceSizingBuilder_.mergeFrom(value);
        }
        if (resourceSizing_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public Builder clearResourceSizing() {
        bitField0_ = (bitField0_ & ~0x00000010);
        resourceSizing_ = null;
        if (resourceSizingBuilder_ != null) {
          resourceSizingBuilder_.dispose();
          resourceSizingBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder getResourceSizingBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getResourceSizingFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder getResourceSizingOrBuilder() {
        if (resourceSizingBuilder_ != null) {
          return resourceSizingBuilder_.getMessageOrBuilder();
        } else {
          return resourceSizing_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance() : resourceSizing_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceSizingProto resourceSizing = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder> 
          getResourceSizingFieldBuilder() {
        if (resourceSizingBuilder_ == null) {
          resourceSizingBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder>(
                  getResourceSizing(),
                  getParentForChildren(),
                  isClean());
          resourceSizing_ = null;
        }
        return resourceSizingBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_;
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       * @return Whether the placementConstraint field is set.
       */
      public boolean hasPlacementConstraint() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       * @return The placementConstraint.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
        if (placementConstraintBuilder_ == null) {
          return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        } else {
          return placementConstraintBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public Builder setPlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          placementConstraint_ = value;
        } else {
          placementConstraintBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public Builder setPlacementConstraint(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (placementConstraintBuilder_ == null) {
          placementConstraint_ = builderForValue.build();
        } else {
          placementConstraintBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public Builder mergePlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0) &&
            placementConstraint_ != null &&
            placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) {
            getPlacementConstraintBuilder().mergeFrom(value);
          } else {
            placementConstraint_ = value;
          }
        } else {
          placementConstraintBuilder_.mergeFrom(value);
        }
        if (placementConstraint_ != null) {
          bitField0_ |= 0x00000020;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public Builder clearPlacementConstraint() {
        bitField0_ = (bitField0_ & ~0x00000020);
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getPlacementConstraintFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
        if (placementConstraintBuilder_ != null) {
          return placementConstraintBuilder_.getMessageOrBuilder();
        } else {
          return placementConstraint_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placementConstraint = 6;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
          getPlacementConstraintFieldBuilder() {
        if (placementConstraintBuilder_ == null) {
          placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>(
                  getPlacementConstraint(),
                  getParentForChildren(),
                  isClean());
          placementConstraint_ = null;
        }
        return placementConstraintBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SchedulingRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SchedulingRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SchedulingRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SchedulingRequestProto>() {
      @java.lang.Override
      public SchedulingRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SchedulingRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SchedulingRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
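
  // End-to-end sketch for SchedulingRequestProto (field values are
  // illustrative only):
  //
  //   org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto req =
  //       org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.newBuilder()
  //           .setAllocationRequestId(42L)
  //           .addAllocationTags("worker")
  //           .setResourceSizing(
  //               org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.newBuilder()
  //                   .setNumAllocations(2))
  //           .build();
  //   byte[] bytes = req.toByteArray();
  //   org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto parsed =
  //       org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.parseFrom(bytes);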

  public interface ResourceSizingProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceSizingProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 numAllocations = 1;</code>
     * @return Whether the numAllocations field is set.
     */
    boolean hasNumAllocations();
    /**
     * <code>optional int32 numAllocations = 1;</code>
     * @return The numAllocations.
     */
    int getNumAllocations();

    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return Whether the resources field is set.
     */
    boolean hasResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return The resources.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceSizingProto}
   */
  public static final class ResourceSizingProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceSizingProto)
      ResourceSizingProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceSizingProto.newBuilder() to construct.
    private ResourceSizingProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceSizingProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceSizingProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder.class);
    }

    private int bitField0_;
    public static final int NUMALLOCATIONS_FIELD_NUMBER = 1;
    private int numAllocations_ = 0;
    /**
     * <code>optional int32 numAllocations = 1;</code>
     * @return Whether the numAllocations field is set.
     */
    @java.lang.Override
    public boolean hasNumAllocations() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 numAllocations = 1;</code>
     * @return The numAllocations.
     */
    @java.lang.Override
    public int getNumAllocations() {
      return numAllocations_;
    }

    public static final int RESOURCES_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return Whether the resources field is set.
     */
    @java.lang.Override
    public boolean hasResources() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     * @return The resources.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
      return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResources()) {
        if (!getResources().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, numAllocations_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getResources());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, numAllocations_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getResources());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
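
    // Note: the computed size is memoized in memoizedSize, which is safe
    // because messages are immutable once built. Nested writes depend on this
    // value: writeMessage() emits the child's serialized size as a varint
    // length prefix, so getSerializedSize() must stay consistent with writeTo().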

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto) obj;

      if (hasNumAllocations() != other.hasNumAllocations()) return false;
      if (hasNumAllocations()) {
        if (getNumAllocations()
            != other.getNumAllocations()) return false;
      }
      if (hasResources() != other.hasResources()) return false;
      if (hasResources()) {
        if (!getResources()
            .equals(other.getResources())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNumAllocations()) {
        hash = (37 * hash) + NUMALLOCATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getNumAllocations();
      }
      if (hasResources()) {
        hash = (37 * hash) + RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getResources().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceSizingProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceSizingProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourcesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        numAllocations_ = 0;
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceSizingProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.numAllocations_ = numAllocations_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.resources_ = resourcesBuilder_ == null
              ? resources_
              : resourcesBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.getDefaultInstance()) return this;
        if (other.hasNumAllocations()) {
          setNumAllocations(other.getNumAllocations());
        }
        if (other.hasResources()) {
          mergeResources(other.getResources());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResources()) {
          if (!getResources().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                numAllocations_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getResourcesFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
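      // Note on the tag constants above: a protobuf tag is
      // (field_number << 3) | wire_type, so 8 reads field 1 (numAllocations)
      // as a varint and 18 reads field 2 (resources) as a length-delimited
      // message; tag 0 signals end of input.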
      private int bitField0_;

      private int numAllocations_ ;
      /**
       * <code>optional int32 numAllocations = 1;</code>
       * @return Whether the numAllocations field is set.
       */
      @java.lang.Override
      public boolean hasNumAllocations() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 numAllocations = 1;</code>
       * @return The numAllocations.
       */
      @java.lang.Override
      public int getNumAllocations() {
        return numAllocations_;
      }
      /**
       * <code>optional int32 numAllocations = 1;</code>
       * @param value The numAllocations to set.
       * @return This builder for chaining.
       */
      public Builder setNumAllocations(int value) {
        numAllocations_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 numAllocations = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAllocations() {
        bitField0_ = (bitField0_ & ~0x00000001);
        numAllocations_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resources_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourcesBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       * @return Whether the resources field is set.
       */
      public boolean hasResources() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       * @return The resources.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResources() {
        if (resourcesBuilder_ == null) {
          return resources_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        } else {
          return resourcesBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder setResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resources_ = value;
        } else {
          resourcesBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder setResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourcesBuilder_ == null) {
          resources_ = builderForValue.build();
        } else {
          resourcesBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder mergeResources(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            resources_ != null &&
            resources_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourcesBuilder().mergeFrom(value);
          } else {
            resources_ = value;
          }
        } else {
          resourcesBuilder_.mergeFrom(value);
        }
        if (resources_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
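      // Standard protobuf merge semantics for a singular message field:
      // if resources is already set to a non-default instance, the incoming
      // value is merged into it field by field; otherwise it replaces the
      // field outright.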
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public Builder clearResources() {
        bitField0_ = (bitField0_ & ~0x00000002);
        resources_ = null;
        if (resourcesBuilder_ != null) {
          resourcesBuilder_.dispose();
          resourcesBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourcesBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getResourcesFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourcesOrBuilder() {
        if (resourcesBuilder_ != null) {
          return resourcesBuilder_.getMessageOrBuilder();
        } else {
          return resources_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resources_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resources = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourcesFieldBuilder() {
        if (resourcesBuilder_ == null) {
          resourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResources(),
                  getParentForChildren(),
                  isClean());
          resources_ = null;
        }
        return resourcesBuilder_;
      }
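      // The SingleFieldBuilderV3 is created lazily. Once it exists it owns
      // the nested message, so resources_ is nulled out and all further
      // reads and writes go through the builder.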
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceSizingProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceSizingProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceSizingProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceSizingProto>() {
      @java.lang.Override
      public ResourceSizingProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
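    // PARSER remains public for source compatibility but is deprecated;
    // parser() below is the supported accessor.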

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceSizingProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceSizingProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
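
  // Illustrative only, not emitted by protoc: a minimal sketch of the
  // generated builder and parser API for ResourceSizingProto, using only
  // members visible in this file. The method name resourceSizingExample is
  // a hypothetical helper added for documentation.
  private static org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto
      resourceSizingExample()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto sizing =
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.newBuilder()
            .setNumAllocations(2)
            .setResources(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance())
            .build();
    // Round-trip through the wire format via the generated parser.
    byte[] bytes = sizing.toByteArray();
    return org.apache.hadoop.yarn.proto.YarnProtos.ResourceSizingProto.parser().parseFrom(bytes);
  }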

  public interface RejectedSchedulingRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.RejectedSchedulingRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
     * @return Whether the reason field is set.
     */
    boolean hasReason();
    /**
     * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
     * @return The reason.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason();

    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     * @return Whether the request field is set.
     */
    boolean hasRequest();
    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     * @return The request.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest();
    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.RejectedSchedulingRequestProto}
   */
  public static final class RejectedSchedulingRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.RejectedSchedulingRequestProto)
      RejectedSchedulingRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use RejectedSchedulingRequestProto.newBuilder() to construct.
    private RejectedSchedulingRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private RejectedSchedulingRequestProto() {
      reason_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new RejectedSchedulingRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int REASON_FIELD_NUMBER = 1;
    private int reason_ = 1;
    /**
     * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
     * @return Whether the reason field is set.
     */
    @java.lang.Override public boolean hasReason() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
     * @return The reason.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason() {
      org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto result = org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(reason_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE : result;
    }

    public static final int REQUEST_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto request_;
    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     * @return Whether the request field is set.
     */
    @java.lang.Override
    public boolean hasRequest() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     * @return The request.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest() {
      return request_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_;
    }
    /**
     * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder() {
      return request_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasReason()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRequest()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRequest().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
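    // memoizedIsInitialized is a tri-state cache: -1 = not yet computed,
    // 0 = known uninitialized, 1 = known initialized. Both required fields
    // must be present, and the nested request must itself be initialized.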

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, reason_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getRequest());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, reason_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getRequest());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto) obj;

      if (hasReason() != other.hasReason()) return false;
      if (hasReason()) {
        if (reason_ != other.reason_) return false;
      }
      if (hasRequest() != other.hasRequest()) return false;
      if (hasRequest()) {
        if (!getRequest()
            .equals(other.getRequest())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReason()) {
        hash = (37 * hash) + REASON_FIELD_NUMBER;
        hash = (53 * hash) + reason_;
      }
      if (hasRequest()) {
        hash = (37 * hash) + REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getRequest().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.RejectedSchedulingRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.RejectedSchedulingRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getRequestFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reason_ = 1;
        request_ = null;
        if (requestBuilder_ != null) {
          requestBuilder_.dispose();
          requestBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reason_ = reason_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.request_ = requestBuilder_ == null
              ? request_
              : requestBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.getDefaultInstance()) return this;
        if (other.hasReason()) {
          setReason(other.getReason());
        }
        if (other.hasRequest()) {
          mergeRequest(other.getRequest());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasReason()) {
          return false;
        }
        if (!hasRequest()) {
          return false;
        }
        if (!getRequest().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  reason_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                input.readMessage(
                    getRequestFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
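      // Unknown enum numbers read in case 8 above are routed to the
      // unknown-field set via mergeUnknownVarintField() rather than dropped,
      // so re-serialization preserves values written by newer producers.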
      private int bitField0_;

      private int reason_ = 1;
      /**
       * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
       * @return Whether the reason field is set.
       */
      @java.lang.Override public boolean hasReason() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
       * @return The reason.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto getReason() {
        org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto result = org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.forNumber(reason_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto.RRP_COULD_NOT_PLACE_ON_NODE : result;
      }
      /**
       * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
       * @param value The reason to set.
       * @return This builder for chaining.
       */
      public Builder setReason(org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        reason_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.RejectionReasonProto reason = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearReason() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reason_ = 1;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto request_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> requestBuilder_;
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       * @return Whether the request field is set.
       */
      public boolean hasRequest() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       * @return The request.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto getRequest() {
        if (requestBuilder_ == null) {
          return request_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_;
        } else {
          return requestBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public Builder setRequest(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) {
        if (requestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          request_ = value;
        } else {
          requestBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public Builder setRequest(
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder builderForValue) {
        if (requestBuilder_ == null) {
          request_ = builderForValue.build();
        } else {
          requestBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public Builder mergeRequest(org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto value) {
        if (requestBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            request_ != null &&
            request_ != org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance()) {
            getRequestBuilder().mergeFrom(value);
          } else {
            request_ = value;
          }
        } else {
          requestBuilder_.mergeFrom(value);
        }
        if (request_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public Builder clearRequest() {
        bitField0_ = (bitField0_ & ~0x00000002);
        request_ = null;
        if (requestBuilder_ != null) {
          requestBuilder_.dispose();
          requestBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder getRequestBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getRequestFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder getRequestOrBuilder() {
        if (requestBuilder_ != null) {
          return requestBuilder_.getMessageOrBuilder();
        } else {
          return request_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance() : request_;
        }
      }
      /**
       * <code>required .hadoop.yarn.SchedulingRequestProto request = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder> 
          getRequestFieldBuilder() {
        if (requestBuilder_ == null) {
          requestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProtoOrBuilder>(
                  getRequest(),
                  getParentForChildren(),
                  isClean());
          request_ = null;
        }
        return requestBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.RejectedSchedulingRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.RejectedSchedulingRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<RejectedSchedulingRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<RejectedSchedulingRequestProto>() {
      @java.lang.Override
      public RejectedSchedulingRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<RejectedSchedulingRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<RejectedSchedulingRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
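
  // Illustrative only, not emitted by protoc: a sketch showing that build()
  // enforces the two required fields of RejectedSchedulingRequestProto
  // (omitting either makes build() throw, while buildPartial() does not
  // check). The method name rejectedRequestExample is a hypothetical helper
  // added for documentation; the default SchedulingRequestProto instance is
  // used as a stand-in payload.
  private static org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto
      rejectedRequestExample() {
    return org.apache.hadoop.yarn.proto.YarnProtos.RejectedSchedulingRequestProto.newBuilder()
        .setReason(
            org.apache.hadoop.yarn.proto.YarnProtos.RejectionReasonProto
                .RRP_COULD_NOT_PLACE_ON_NODE)
        .setRequest(
            org.apache.hadoop.yarn.proto.YarnProtos.SchedulingRequestProto.getDefaultInstance())
        .build();
  }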

  public interface PreemptionMessageProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionMessageProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     * @return Whether the strictContract field is set.
     */
    boolean hasStrictContract();
    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     * @return The strictContract.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract();
    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder();

    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     * @return Whether the contract field is set.
     */
    boolean hasContract();
    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     * @return The contract.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract();
    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionMessageProto}
   */
  public static final class PreemptionMessageProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionMessageProto)
      PreemptionMessageProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PreemptionMessageProto.newBuilder() to construct.
    private PreemptionMessageProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PreemptionMessageProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PreemptionMessageProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder.class);
    }

    private int bitField0_;
    public static final int STRICTCONTRACT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto strictContract_;
    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     * @return Whether the strictContract field is set.
     */
    @java.lang.Override
    public boolean hasStrictContract() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     * @return The strictContract.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract() {
      return strictContract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_;
    }
    /**
     * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder() {
      return strictContract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_;
    }

    public static final int CONTRACT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto contract_;
    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     * @return Whether the contract field is set.
     */
    @java.lang.Override
    public boolean hasContract() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     * @return The contract.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract() {
      return contract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_;
    }
    /**
     * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder() {
      return contract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasContract()) {
        if (!getContract().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
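    // Only 'contract' is recursively checked here: protoc emits a nested
    // isInitialized() check only for message fields whose type transitively
    // contains required fields, which evidently holds for
    // PreemptionContractProto but not for StrictPreemptionContractProto.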

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getStrictContract());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getContract());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getStrictContract());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getContract());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto) obj;

      if (hasStrictContract() != other.hasStrictContract()) return false;
      if (hasStrictContract()) {
        if (!getStrictContract()
            .equals(other.getStrictContract())) return false;
      }
      if (hasContract() != other.hasContract()) return false;
      if (hasContract()) {
        if (!getContract()
            .equals(other.getContract())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasStrictContract()) {
        hash = (37 * hash) + STRICTCONTRACT_FIELD_NUMBER;
        hash = (53 * hash) + getStrictContract().hashCode();
      }
      if (hasContract()) {
        hash = (37 * hash) + CONTRACT_FIELD_NUMBER;
        hash = (53 * hash) + getContract().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PreemptionMessageProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionMessageProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getStrictContractFieldBuilder();
          getContractFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        strictContract_ = null;
        if (strictContractBuilder_ != null) {
          strictContractBuilder_.dispose();
          strictContractBuilder_ = null;
        }
        contract_ = null;
        if (contractBuilder_ != null) {
          contractBuilder_.dispose();
          contractBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto result) {
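        // bitField0_ tracks which optional fields were explicitly set:
        // bit 0 (0x1) is strictContract, bit 1 (0x2) is contract. Only set
        // fields are copied into the result, and the same bits are mirrored
        // onto result.bitField0_ so has-accessors work on the built message.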
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.strictContract_ = strictContractBuilder_ == null
              ? strictContract_
              : strictContractBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.contract_ = contractBuilder_ == null
              ? contract_
              : contractBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto.getDefaultInstance()) return this;
        if (other.hasStrictContract()) {
          mergeStrictContract(other.getStrictContract());
        }
        if (other.hasContract()) {
          mergeContract(other.getContract());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
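        // contract is optional, but its message type (transitively) declares
        // required fields, so a contract that has been set must itself be
        // initialized before build() may succeed.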
        if (hasContract()) {
          if (!getContract().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
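            // Each tag is (field_number << 3) | wire_type; both fields here are
            // length-delimited messages (wire type 2), so strictContract
            // (field 1) arrives as tag 10 and contract (field 2) as tag 18.
            // Tag 0 means end of input.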
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getStrictContractFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getContractFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto strictContract_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder> strictContractBuilder_;
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       * @return Whether the strictContract field is set.
       */
      public boolean hasStrictContract() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       * @return The strictContract.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getStrictContract() {
        if (strictContractBuilder_ == null) {
          return strictContract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_;
        } else {
          return strictContractBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public Builder setStrictContract(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto value) {
        if (strictContractBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          strictContract_ = value;
        } else {
          strictContractBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public Builder setStrictContract(
          org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder builderForValue) {
        if (strictContractBuilder_ == null) {
          strictContract_ = builderForValue.build();
        } else {
          strictContractBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public Builder mergeStrictContract(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto value) {
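        // Merge field-by-field only when a non-default strictContract was
        // already set; otherwise adopt the incoming message wholesale.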
        if (strictContractBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            strictContract_ != null &&
            strictContract_ != org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance()) {
            getStrictContractBuilder().mergeFrom(value);
          } else {
            strictContract_ = value;
          }
        } else {
          strictContractBuilder_.mergeFrom(value);
        }
        if (strictContract_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public Builder clearStrictContract() {
        bitField0_ = (bitField0_ & ~0x00000001);
        strictContract_ = null;
        if (strictContractBuilder_ != null) {
          strictContractBuilder_.dispose();
          strictContractBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder getStrictContractBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getStrictContractFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder getStrictContractOrBuilder() {
        if (strictContractBuilder_ != null) {
          return strictContractBuilder_.getMessageOrBuilder();
        } else {
          return strictContract_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance() : strictContract_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.StrictPreemptionContractProto strictContract = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder> 
          getStrictContractFieldBuilder() {
        if (strictContractBuilder_ == null) {
          strictContractBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder>(
                  getStrictContract(),
                  getParentForChildren(),
                  isClean());
          strictContract_ = null;
        }
        return strictContractBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto contract_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder> contractBuilder_;
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       * @return Whether the contract field is set.
       */
      public boolean hasContract() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       * @return The contract.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getContract() {
        if (contractBuilder_ == null) {
          return contract_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_;
        } else {
          return contractBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public Builder setContract(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto value) {
        if (contractBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          contract_ = value;
        } else {
          contractBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public Builder setContract(
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder builderForValue) {
        if (contractBuilder_ == null) {
          contract_ = builderForValue.build();
        } else {
          contractBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public Builder mergeContract(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto value) {
        if (contractBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            contract_ != null &&
            contract_ != org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance()) {
            getContractBuilder().mergeFrom(value);
          } else {
            contract_ = value;
          }
        } else {
          contractBuilder_.mergeFrom(value);
        }
        if (contract_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public Builder clearContract() {
        bitField0_ = (bitField0_ & ~0x00000002);
        contract_ = null;
        if (contractBuilder_ != null) {
          contractBuilder_.dispose();
          contractBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder getContractBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getContractFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder getContractOrBuilder() {
        if (contractBuilder_ != null) {
          return contractBuilder_.getMessageOrBuilder();
        } else {
          return contract_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance() : contract_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PreemptionContractProto contract = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder> 
          getContractFieldBuilder() {
        if (contractBuilder_ == null) {
          contractBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder>(
                  getContract(),
                  getParentForChildren(),
                  isClean());
          contract_ = null;
        }
        return contractBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionMessageProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionMessageProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

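    // The PARSER field itself is deprecated in generated code; obtain the
    // parser via parser() or getParserForType() instead.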
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionMessageProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionMessageProto>() {
      @java.lang.Override
      public PreemptionMessageProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionMessageProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionMessageProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
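
  // Usage sketch (hand-written; not protoc output): building and
  // round-tripping a PreemptionMessageProto through the generated API.
  // The empty contract is a placeholder value; a real ResourceManager
  // response would carry populated contracts.
  //
  //   PreemptionMessageProto msg = PreemptionMessageProto.newBuilder()
  //       .setContract(PreemptionContractProto.getDefaultInstance())
  //       .build();
  //   byte[] wire = msg.toByteArray();
  //   PreemptionMessageProto copy = PreemptionMessageProto.parseFrom(wire);
  //   assert copy.hasContract();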

  public interface StrictPreemptionContractProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StrictPreemptionContractProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> 
        getContainerList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index);
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    int getContainerCount();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
        getContainerOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.StrictPreemptionContractProto}
   */
  public static final class StrictPreemptionContractProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StrictPreemptionContractProto)
      StrictPreemptionContractProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StrictPreemptionContractProto.newBuilder() to construct.
    private StrictPreemptionContractProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StrictPreemptionContractProto() {
      container_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StrictPreemptionContractProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder.class);
    }

    public static final int CONTAINER_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> container_;
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> getContainerList() {
      return container_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
        getContainerOrBuilderList() {
      return container_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    @java.lang.Override
    public int getContainerCount() {
      return container_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) {
      return container_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
        int index) {
      return container_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < container_.size(); i++) {
        output.writeMessage(1, container_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < container_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, container_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto) obj;

      if (!getContainerList()
          .equals(other.getContainerList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getContainerCount() > 0) {
        hash = (37 * hash) + CONTAINER_FIELD_NUMBER;
        hash = (53 * hash) + getContainerList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StrictPreemptionContractProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StrictPreemptionContractProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (containerBuilder_ == null) {
          container_ = java.util.Collections.emptyList();
        } else {
          container_ = null;
          containerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result) {
        if (containerBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            container_ = java.util.Collections.unmodifiableList(container_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.container_ = container_;
        } else {
          result.container_ = containerBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto result) {
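        // from_bitField0_ is unused: this message has no singular fields, and
        // bit 0 only marks the repeated container list as mutable, which
        // buildPartialRepeatedFields has already consumed above.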
        int from_bitField0_ = bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto.getDefaultInstance()) return this;
        if (containerBuilder_ == null) {
          if (!other.container_.isEmpty()) {
            if (container_.isEmpty()) {
              container_ = other.container_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureContainerIsMutable();
              container_.addAll(other.container_);
            }
            onChanged();
          }
        } else {
          if (!other.container_.isEmpty()) {
            if (containerBuilder_.isEmpty()) {
              containerBuilder_.dispose();
              containerBuilder_ = null;
              container_ = other.container_;
              bitField0_ = (bitField0_ & ~0x00000001);
              containerBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerFieldBuilder() : null;
            } else {
              containerBuilder_.addAllMessages(other.container_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.PARSER,
                        extensionRegistry);
                if (containerBuilder_ == null) {
                  ensureContainerIsMutable();
                  container_.add(m);
                } else {
                  containerBuilder_.addMessage(m);
                }
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> container_ =
        java.util.Collections.emptyList();
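      // Copy-on-write: after mergeFrom, container_ may alias another
      // message's list, so bit 0 of bitField0_ marks it as privately owned
      // and ensureContainerIsMutable() copies it before the first mutation.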
      private void ensureContainerIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          container_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto>(container_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> containerBuilder_;

      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> getContainerList() {
        if (containerBuilder_ == null) {
          return java.util.Collections.unmodifiableList(container_);
        } else {
          return containerBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public int getContainerCount() {
        if (containerBuilder_ == null) {
          return container_.size();
        } else {
          return containerBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) {
        if (containerBuilder_ == null) {
          return container_.get(index);
        } else {
          return containerBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder setContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.set(index, value);
          onChanged();
        } else {
          containerBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder setContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder addContainer(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.add(value);
          onChanged();
        } else {
          containerBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder addContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.add(index, value);
          onChanged();
        } else {
          containerBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder addContainer(
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.add(builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder addContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder addAllContainer(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> values) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, container_);
          onChanged();
        } else {
          containerBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder clearContainer() {
        if (containerBuilder_ == null) {
          container_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          containerBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public Builder removeContainer(int index) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.remove(index);
          onChanged();
        } else {
          containerBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder getContainerBuilder(
          int index) {
        return getContainerFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
          int index) {
        if (containerBuilder_ == null) {
          return container_.get(index);
        } else {
          return containerBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
           getContainerOrBuilderList() {
        if (containerBuilder_ != null) {
          return containerBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(container_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder() {
        return getContainerFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder(
          int index) {
        return getContainerFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder> 
           getContainerBuilderList() {
        return getContainerFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
          getContainerFieldBuilder() {
        if (containerBuilder_ == null) {
          containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder>(
                  container_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          container_ = null;
        }
        return containerBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StrictPreemptionContractProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StrictPreemptionContractProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StrictPreemptionContractProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StrictPreemptionContractProto>() {
      @java.lang.Override
      public StrictPreemptionContractProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StrictPreemptionContractProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StrictPreemptionContractProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
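
  // Usage sketch (hand-written; not protoc output): populating the repeated
  // container field and round-tripping the message length-prefixed over a
  // stream. The default PreemptionContainerProto and the out/in streams are
  // placeholders.
  //
  //   StrictPreemptionContractProto contract =
  //       StrictPreemptionContractProto.newBuilder()
  //           .addContainer(PreemptionContainerProto.getDefaultInstance())
  //           .build();
  //   contract.writeDelimitedTo(out);   // out: java.io.OutputStream
  //   StrictPreemptionContractProto copy =
  //       StrictPreemptionContractProto.parseDelimitedFrom(in);  // in: java.io.InputStream
  //   assert copy.getContainerCount() == 1;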

  public interface PreemptionContractProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionContractProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> 
        getResourceList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index);
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    int getResourceCount();
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> 
        getResourceOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> 
        getContainerList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index);
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    int getContainerCount();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
        getContainerOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionContractProto}
   */
  public static final class PreemptionContractProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionContractProto)
      PreemptionContractProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PreemptionContractProto.newBuilder() to construct.
    private PreemptionContractProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PreemptionContractProto() {
      resource_ = java.util.Collections.emptyList();
      container_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PreemptionContractProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder.class);
    }

    public static final int RESOURCE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> resource_;
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> getResourceList() {
      return resource_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> 
        getResourceOrBuilderList() {
      return resource_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public int getResourceCount() {
      return resource_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index) {
      return resource_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder(
        int index) {
      return resource_.get(index);
    }

    public static final int CONTAINER_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> container_;
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> getContainerList() {
      return container_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
        getContainerOrBuilderList() {
      return container_;
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    @java.lang.Override
    public int getContainerCount() {
      return container_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) {
      return container_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
        int index) {
      return container_.get(index);
    }

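    // Memoizes isInitialized(): -1 = not yet computed, 0 = known false, 1 = known true.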
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getResourceCount(); i++) {
        if (!getResource(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < resource_.size(); i++) {
        output.writeMessage(1, resource_.get(i));
      }
      for (int i = 0; i < container_.size(); i++) {
        output.writeMessage(2, container_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

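    // Serialized size is memoized in the inherited memoizedSize field (-1 = unset).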
    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < resource_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, resource_.get(i));
      }
      for (int i = 0; i < container_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, container_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto) obj;

      if (!getResourceList()
          .equals(other.getResourceList())) return false;
      if (!getContainerList()
          .equals(other.getContainerList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getResourceCount() > 0) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResourceList().hashCode();
      }
      if (getContainerCount() > 0) {
        hash = (37 * hash) + CONTAINER_FIELD_NUMBER;
        hash = (53 * hash) + getContainerList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
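    // Note (not protoc output): the delimited variants pair with
    // writeDelimitedTo(OutputStream) and frame each message with a varint length,
    // so several messages can share one stream.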
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PreemptionContractProto}
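     *
     * <p>Sketch (illustrative; not protoc output): repeated fields grow one
     * element at a time or in bulk, and {@code clearContainer()} resets the
     * list to empty.
     * <pre>{@code
     * PreemptionContractProto.Builder b = PreemptionContractProto.newBuilder()
     *     .addContainer(PreemptionContainerProto.getDefaultInstance());
     * int n = b.getContainerCount(); // 1
     * b.clearContainer();            // empty again
     * }</pre>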
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionContractProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (resourceBuilder_ == null) {
          resource_ = java.util.Collections.emptyList();
        } else {
          resource_ = null;
          resourceBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (containerBuilder_ == null) {
          container_ = java.util.Collections.emptyList();
        } else {
          container_ = null;
          containerBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContractProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            resource_ = java.util.Collections.unmodifiableList(resource_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.resource_ = resource_;
        } else {
          result.resource_ = resourceBuilder_.build();
        }
        if (containerBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            container_ = java.util.Collections.unmodifiableList(container_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.container_ = container_;
        } else {
          result.container_ = containerBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto result) {
        int from_bitField0_ = bitField0_;
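        // Intentionally empty apart from the generated local: this message has no
        // singular fields, so bitField0_ only tracks repeated-field mutability.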
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto.getDefaultInstance()) return this;
        if (resourceBuilder_ == null) {
          if (!other.resource_.isEmpty()) {
            if (resource_.isEmpty()) {
              resource_ = other.resource_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureResourceIsMutable();
              resource_.addAll(other.resource_);
            }
            onChanged();
          }
        } else {
          if (!other.resource_.isEmpty()) {
            if (resourceBuilder_.isEmpty()) {
              resourceBuilder_.dispose();
              resourceBuilder_ = null;
              resource_ = other.resource_;
              bitField0_ = (bitField0_ & ~0x00000001);
              resourceBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getResourceFieldBuilder() : null;
            } else {
              resourceBuilder_.addAllMessages(other.resource_);
            }
          }
        }
        if (containerBuilder_ == null) {
          if (!other.container_.isEmpty()) {
            if (container_.isEmpty()) {
              container_ = other.container_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureContainerIsMutable();
              container_.addAll(other.container_);
            }
            onChanged();
          }
        } else {
          if (!other.container_.isEmpty()) {
            if (containerBuilder_.isEmpty()) {
              containerBuilder_.dispose();
              containerBuilder_ = null;
              container_ = other.container_;
              bitField0_ = (bitField0_ & ~0x00000002);
              containerBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerFieldBuilder() : null;
            } else {
              containerBuilder_.addAllMessages(other.container_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getResourceCount(); i++) {
          if (!getResource(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
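        // Tag layout (comment added for clarity): tag = (field_number << 3) | wire_type.
        // Field 1 (resource) with wire type 2 (length-delimited) gives 10; field 2
        // (container) gives 18; readTag() returns 0 at end of input.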
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.PARSER,
                        extensionRegistry);
                if (resourceBuilder_ == null) {
                  ensureResourceIsMutable();
                  resource_.add(m);
                } else {
                  resourceBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.PARSER,
                        extensionRegistry);
                if (containerBuilder_ == null) {
                  ensureContainerIsMutable();
                  container_.add(m);
                } else {
                  containerBuilder_.addMessage(m);
                }
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> resource_ =
        java.util.Collections.emptyList();
      private void ensureResourceIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          resource_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto>(resource_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> resourceBuilder_;

      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> getResourceList() {
        if (resourceBuilder_ == null) {
          return java.util.Collections.unmodifiableList(resource_);
        } else {
          return resourceBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public int getResourceCount() {
        if (resourceBuilder_ == null) {
          return resource_.size();
        } else {
          return resourceBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getResource(int index) {
        if (resourceBuilder_ == null) {
          return resource_.get(index);
        } else {
          return resourceBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder setResource(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceIsMutable();
          resource_.set(index, value);
          onChanged();
        } else {
          resourceBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder setResource(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          ensureResourceIsMutable();
          resource_.set(index, builderForValue.build());
          onChanged();
        } else {
          resourceBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder addResource(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceIsMutable();
          resource_.add(value);
          onChanged();
        } else {
          resourceBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder addResource(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureResourceIsMutable();
          resource_.add(index, value);
          onChanged();
        } else {
          resourceBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder addResource(
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          ensureResourceIsMutable();
          resource_.add(builderForValue.build());
          onChanged();
        } else {
          resourceBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder addResource(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          ensureResourceIsMutable();
          resource_.add(index, builderForValue.build());
          onChanged();
        } else {
          resourceBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder addAllResource(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto> values) {
        if (resourceBuilder_ == null) {
          ensureResourceIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, resource_);
          onChanged();
        } else {
          resourceBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder clearResource() {
        if (resourceBuilder_ == null) {
          resource_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          resourceBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public Builder removeResource(int index) {
        if (resourceBuilder_ == null) {
          ensureResourceIsMutable();
          resource_.remove(index);
          onChanged();
        } else {
          resourceBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder getResourceBuilder(
          int index) {
        return getResourceFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder getResourceOrBuilder(
          int index) {
        if (resourceBuilder_ == null) {
          return resource_.get(index);
        } else {
          return resourceBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> 
           getResourceOrBuilderList() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(resource_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder addResourceBuilder() {
        return getResourceFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder addResourceBuilder(
          int index) {
        return getResourceFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionResourceRequestProto resource = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder> 
           getResourceBuilderList() {
        return getResourceFieldBuilder().getBuilderList();
      }
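      // Lazily replaces the plain List with a RepeatedFieldBuilderV3 on first use;
      // after the handoff, resource_ is nulled and the builder owns the elements.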
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder>(
                  resource_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> container_ =
        java.util.Collections.emptyList();
      private void ensureContainerIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          container_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto>(container_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> containerBuilder_;

      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> getContainerList() {
        if (containerBuilder_ == null) {
          return java.util.Collections.unmodifiableList(container_);
        } else {
          return containerBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public int getContainerCount() {
        if (containerBuilder_ == null) {
          return container_.size();
        } else {
          return containerBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getContainer(int index) {
        if (containerBuilder_ == null) {
          return container_.get(index);
        } else {
          return containerBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder setContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.set(index, value);
          onChanged();
        } else {
          containerBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder setContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder addContainer(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.add(value);
          onChanged();
        } else {
          containerBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder addContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto value) {
        if (containerBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerIsMutable();
          container_.add(index, value);
          onChanged();
        } else {
          containerBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder addContainer(
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.add(builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder addContainer(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder builderForValue) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder addAllContainer(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto> values) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, container_);
          onChanged();
        } else {
          containerBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder clearContainer() {
        if (containerBuilder_ == null) {
          container_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          containerBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public Builder removeContainer(int index) {
        if (containerBuilder_ == null) {
          ensureContainerIsMutable();
          container_.remove(index);
          onChanged();
        } else {
          containerBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder getContainerBuilder(
          int index) {
        return getContainerFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder getContainerOrBuilder(
          int index) {
        if (containerBuilder_ == null) {
          return container_.get(index);
        } else {
          return containerBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
           getContainerOrBuilderList() {
        if (containerBuilder_ != null) {
          return containerBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(container_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder() {
        return getContainerFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder addContainerBuilder(
          int index) {
        return getContainerFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PreemptionContainerProto container = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder> 
           getContainerBuilderList() {
        return getContainerFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder> 
          getContainerFieldBuilder() {
        if (containerBuilder_ == null) {
          containerBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder>(
                  container_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          container_ = null;
        }
        return containerBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionContractProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionContractProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

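    // Direct use of PARSER is deprecated in generated code; prefer parser() or the
    // static parseFrom(...) overloads above.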
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContractProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionContractProto>() {
      @java.lang.Override
      public PreemptionContractProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContractProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContractProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface PreemptionContainerProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionContainerProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return The id.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionContainerProto}
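   *
   * <p>Usage sketch (illustrative only; not protoc output). The id field has
   * proto2 optional semantics: {@code hasId()} reports presence, while
   * {@code getId()} falls back to the ContainerIdProto default instance
   * rather than returning null.
   * <pre>{@code
   * PreemptionContainerProto c = PreemptionContainerProto.getDefaultInstance();
   * boolean present = c.hasId();     // false
   * ContainerIdProto id = c.getId(); // default instance, never null
   * }</pre>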
   */
  public static final class PreemptionContainerProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionContainerProto)
      PreemptionContainerProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PreemptionContainerProto.newBuilder() to construct.
    private PreemptionContainerProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PreemptionContainerProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PreemptionContainerProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder.class);
    }

    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (!getId()
            .equals(other.getId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PreemptionContainerProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionContainerProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
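      // alwaysUseFieldBuilders is a protobuf-internal test hook; outside tests the
      // id field builder is created lazily on first access.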
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionContainerProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

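      // Copies the singular id (and its has-bit) into the built message; a nested
      // idBuilder_, when present, takes precedence over the cached id_.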
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = idBuilder_ == null
              ? id_
              : idBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          mergeId(other.getId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
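      // Bit mask recording which optional fields have been explicitly set on
      // this builder; bit 0x00000001 corresponds to the 'id' field.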
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto id_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> idBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       * @return Whether the id field is set.
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       * @return The id.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getId() {
        if (idBuilder_ == null) {
          return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
        } else {
          return idBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder setId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (idBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          id_ = value;
        } else {
          idBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder setId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (idBuilder_ == null) {
          id_ = builderForValue.build();
        } else {
          idBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder mergeId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (idBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            id_ != null &&
            id_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getIdBuilder().mergeFrom(value);
          } else {
            id_ = value;
          }
        } else {
          idBuilder_.mergeFrom(value);
        }
        if (id_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
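      // Note on merge semantics: when 'id' is already set, mergeId() merges
      // the incoming ContainerIdProto field-by-field into the existing value
      // (via the nested builder) instead of replacing it wholesale; setId()
      // is the replace-semantics counterpart. A minimal sketch, with
      // hypothetical ContainerIdProto values (not part of the generated code):
      //
      //   Builder b = PreemptionContainerProto.newBuilder()
      //       .setId(partialContainerId);   // replaces any prior value
      //   b.mergeId(otherContainerId);      // overlays set subfields only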
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getIdOrBuilder() {
        if (idBuilder_ != null) {
          return idBuilder_.getMessageOrBuilder();
        } else {
          return id_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : id_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getId(),
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionContainerProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionContainerProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionContainerProto>() {
      @java.lang.Override
      public PreemptionContainerProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionContainerProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
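
  // Illustrative round trip for PreemptionContainerProto, assuming a
  // hypothetical 'containerId' ContainerIdProto is in scope (not part of the
  // generated code):
  //
  //   byte[] bytes = YarnProtos.PreemptionContainerProto.newBuilder()
  //       .setId(containerId)
  //       .build()
  //       .toByteArray();
  //   YarnProtos.PreemptionContainerProto parsed =
  //       YarnProtos.PreemptionContainerProto.parseFrom(bytes);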

  public interface PreemptionResourceRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PreemptionResourceRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.PreemptionResourceRequestProto}
   */
  public static final class PreemptionResourceRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PreemptionResourceRequestProto)
      PreemptionResourceRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PreemptionResourceRequestProto.newBuilder() to construct.
    private PreemptionResourceRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PreemptionResourceRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PreemptionResourceRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
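    // memoizedIsInitialized caches the tri-state result above: -1 not yet
    // computed, 0 known-uninitialized, 1 known-initialized. protoc emits the
    // nested getResource().isInitialized() check only when the nested type
    // carries (transitively) required fields, which is why this otherwise
    // optional 'resource' field can make the whole message uninitialized.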

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResource());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResource());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto) obj;

      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PreemptionResourceRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PreemptionResourceRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto.getDefaultInstance()) return this;
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
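      // Wire-format note: the tag switch above decodes
      // tag = (field_number << 3) | wire_type, so case 10 is field 1
      // ('resource') with wire type 2 (length-delimited message); case 0
      // signals end of input, and unrecognized tags are preserved through
      // parseUnknownField().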
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceRequestProto resource = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PreemptionResourceRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PreemptionResourceRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionResourceRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PreemptionResourceRequestProto>() {
      @java.lang.Override
      public PreemptionResourceRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionResourceRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PreemptionResourceRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PreemptionResourceRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
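
  // Illustrative parse-and-read for PreemptionResourceRequestProto, assuming
  // a hypothetical InputStream 'in' (not part of the generated code):
  //
  //   YarnProtos.PreemptionResourceRequestProto req =
  //       YarnProtos.PreemptionResourceRequestProto.parseFrom(in);
  //   if (req.hasResource()) {
  //     YarnProtos.ResourceRequestProto rr = req.getResource();
  //     // ... inspect rr ...
  //   }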

  public interface ResourceBlacklistRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceBlacklistRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @return A list containing the blacklistAdditions.
     */
    java.util.List<java.lang.String>
        getBlacklistAdditionsList();
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @return The count of blacklistAdditions.
     */
    int getBlacklistAdditionsCount();
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @param index The index of the element to return.
     * @return The blacklistAdditions at the given index.
     */
    java.lang.String getBlacklistAdditions(int index);
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the blacklistAdditions at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getBlacklistAdditionsBytes(int index);

    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @return A list containing the blacklistRemovals.
     */
    java.util.List<java.lang.String>
        getBlacklistRemovalsList();
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @return The count of blacklistRemovals.
     */
    int getBlacklistRemovalsCount();
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @param index The index of the element to return.
     * @return The blacklistRemovals at the given index.
     */
    java.lang.String getBlacklistRemovals(int index);
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @param index The index of the value to return.
     * @return The bytes of the blacklistRemovals at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getBlacklistRemovalsBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceBlacklistRequestProto}
   */
  public static final class ResourceBlacklistRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceBlacklistRequestProto)
      ResourceBlacklistRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceBlacklistRequestProto.newBuilder() to construct.
    private ResourceBlacklistRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceBlacklistRequestProto() {
      blacklistAdditions_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      blacklistRemovals_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceBlacklistRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder.class);
    }

    public static final int BLACKLIST_ADDITIONS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList blacklistAdditions_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @return A list containing the blacklistAdditions.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getBlacklistAdditionsList() {
      return blacklistAdditions_;
    }
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @return The count of blacklistAdditions.
     */
    public int getBlacklistAdditionsCount() {
      return blacklistAdditions_.size();
    }
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @param index The index of the element to return.
     * @return The blacklistAdditions at the given index.
     */
    public java.lang.String getBlacklistAdditions(int index) {
      return blacklistAdditions_.get(index);
    }
    /**
     * <code>repeated string blacklist_additions = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the blacklistAdditions at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getBlacklistAdditionsBytes(int index) {
      return blacklistAdditions_.getByteString(index);
    }

    public static final int BLACKLIST_REMOVALS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList blacklistRemovals_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @return A list containing the blacklistRemovals.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getBlacklistRemovalsList() {
      return blacklistRemovals_;
    }
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @return The count of blacklistRemovals.
     */
    public int getBlacklistRemovalsCount() {
      return blacklistRemovals_.size();
    }
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @param index The index of the element to return.
     * @return The blacklistRemovals at the given index.
     */
    public java.lang.String getBlacklistRemovals(int index) {
      return blacklistRemovals_.get(index);
    }
    /**
     * <code>repeated string blacklist_removals = 2;</code>
     * @param index The index of the value to return.
     * @return The bytes of the blacklistRemovals at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getBlacklistRemovalsBytes(int index) {
      return blacklistRemovals_.getByteString(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < blacklistAdditions_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, blacklistAdditions_.getRaw(i));
      }
      for (int i = 0; i < blacklistRemovals_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, blacklistRemovals_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < blacklistAdditions_.size(); i++) {
          dataSize += computeStringSizeNoTag(blacklistAdditions_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getBlacklistAdditionsList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < blacklistRemovals_.size(); i++) {
          dataSize += computeStringSizeNoTag(blacklistRemovals_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getBlacklistRemovalsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
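    // Size arithmetic: each repeated string contributes its length-delimited
    // payload (computeStringSizeNoTag) plus one tag byte per element; the
    // '1 *' factor is the tag size, a single byte here because field numbers
    // 1 and 2 encode as one-byte tags.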

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto) obj;

      if (!getBlacklistAdditionsList()
          .equals(other.getBlacklistAdditionsList())) return false;
      if (!getBlacklistRemovalsList()
          .equals(other.getBlacklistRemovalsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getBlacklistAdditionsCount() > 0) {
        hash = (37 * hash) + BLACKLIST_ADDITIONS_FIELD_NUMBER;
        hash = (53 * hash) + getBlacklistAdditionsList().hashCode();
      }
      if (getBlacklistRemovalsCount() > 0) {
        hash = (37 * hash) + BLACKLIST_REMOVALS_FIELD_NUMBER;
        hash = (53 * hash) + getBlacklistRemovalsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceBlacklistRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceBlacklistRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        blacklistAdditions_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        blacklistRemovals_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          blacklistAdditions_.makeImmutable();
          result.blacklistAdditions_ = blacklistAdditions_;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          blacklistRemovals_.makeImmutable();
          result.blacklistRemovals_ = blacklistRemovals_;
        }
      }
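      // Ownership note: buildPartial0 hands the builder's LazyStringArrayList
      // to the message after makeImmutable(), so building copies nothing; a
      // later mutation on this builder goes through
      // ensureBlacklistAdditionsIsMutable()/ensureBlacklistRemovalsIsMutable(),
      // which copy-on-write a fresh mutable list.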

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto.getDefaultInstance()) return this;
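        // For each repeated field: if this builder's list is still empty, share
        // the other message's (immutable) list directly; otherwise append onto
        // a mutable copy via ensure...IsMutable().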
        if (!other.blacklistAdditions_.isEmpty()) {
          if (blacklistAdditions_.isEmpty()) {
            blacklistAdditions_ = other.blacklistAdditions_;
            bitField0_ |= 0x00000001;
          } else {
            ensureBlacklistAdditionsIsMutable();
            blacklistAdditions_.addAll(other.blacklistAdditions_);
          }
          onChanged();
        }
        if (!other.blacklistRemovals_.isEmpty()) {
          if (blacklistRemovals_.isEmpty()) {
            blacklistRemovals_ = other.blacklistRemovals_;
            bitField0_ |= 0x00000002;
          } else {
            ensureBlacklistRemovalsIsMutable();
            blacklistRemovals_.addAll(other.blacklistRemovals_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
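        // Wire-format loop: tag 0 marks end of input; tags 10 and 18 are
        // (field_number << 3 | wire_type) for fields 1 and 2 with wire type 2
        // (length-delimited), i.e. blacklist_additions and blacklist_removals.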
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureBlacklistAdditionsIsMutable();
                blacklistAdditions_.add(bs);
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureBlacklistRemovalsIsMutable();
                blacklistRemovals_.add(bs);
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
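      // bitField0_ records which fields have been explicitly set on this
      // builder: bit 0x1 = blacklist_additions, bit 0x2 = blacklist_removals.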
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList blacklistAdditions_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
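      // Copy-on-write: the list starts as the shared immutable empty list and
      // is swapped for a mutable copy on the first mutation.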
      private void ensureBlacklistAdditionsIsMutable() {
        if (!blacklistAdditions_.isModifiable()) {
          blacklistAdditions_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(blacklistAdditions_);
        }
        bitField0_ |= 0x00000001;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @return A list containing the blacklistAdditions.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getBlacklistAdditionsList() {
        blacklistAdditions_.makeImmutable();
        return blacklistAdditions_;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @return The count of blacklistAdditions.
       */
      public int getBlacklistAdditionsCount() {
        return blacklistAdditions_.size();
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param index The index of the element to return.
       * @return The blacklistAdditions at the given index.
       */
      public java.lang.String getBlacklistAdditions(int index) {
        return blacklistAdditions_.get(index);
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the blacklistAdditions at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getBlacklistAdditionsBytes(int index) {
        return blacklistAdditions_.getByteString(index);
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param index The index to set the value at.
       * @param value The blacklistAdditions to set.
       * @return This builder for chaining.
       */
      public Builder setBlacklistAdditions(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistAdditionsIsMutable();
        blacklistAdditions_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param value The blacklistAdditions to add.
       * @return This builder for chaining.
       */
      public Builder addBlacklistAdditions(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistAdditionsIsMutable();
        blacklistAdditions_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param values The blacklistAdditions to add.
       * @return This builder for chaining.
       */
      public Builder addAllBlacklistAdditions(
          java.lang.Iterable<java.lang.String> values) {
        ensureBlacklistAdditionsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, blacklistAdditions_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearBlacklistAdditions() {
        blacklistAdditions_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_additions = 1;</code>
       * @param value The bytes of the blacklistAdditions to add.
       * @return This builder for chaining.
       */
      public Builder addBlacklistAdditionsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistAdditionsIsMutable();
        blacklistAdditions_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList blacklistRemovals_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureBlacklistRemovalsIsMutable() {
        if (!blacklistRemovals_.isModifiable()) {
          blacklistRemovals_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(blacklistRemovals_);
        }
        bitField0_ |= 0x00000002;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @return A list containing the blacklistRemovals.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getBlacklistRemovalsList() {
        blacklistRemovals_.makeImmutable();
        return blacklistRemovals_;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @return The count of blacklistRemovals.
       */
      public int getBlacklistRemovalsCount() {
        return blacklistRemovals_.size();
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param index The index of the element to return.
       * @return The blacklistRemovals at the given index.
       */
      public java.lang.String getBlacklistRemovals(int index) {
        return blacklistRemovals_.get(index);
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param index The index of the value to return.
       * @return The bytes of the blacklistRemovals at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getBlacklistRemovalsBytes(int index) {
        return blacklistRemovals_.getByteString(index);
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param index The index to set the value at.
       * @param value The blacklistRemovals to set.
       * @return This builder for chaining.
       */
      public Builder setBlacklistRemovals(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistRemovalsIsMutable();
        blacklistRemovals_.set(index, value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param value The blacklistRemovals to add.
       * @return This builder for chaining.
       */
      public Builder addBlacklistRemovals(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistRemovalsIsMutable();
        blacklistRemovals_.add(value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param values The blacklistRemovals to add.
       * @return This builder for chaining.
       */
      public Builder addAllBlacklistRemovals(
          java.lang.Iterable<java.lang.String> values) {
        ensureBlacklistRemovalsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, blacklistRemovals_);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearBlacklistRemovals() {
        blacklistRemovals_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string blacklist_removals = 2;</code>
       * @param value The bytes of the blacklistRemovals to add.
       * @return This builder for chaining.
       */
      public Builder addBlacklistRemovalsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBlacklistRemovalsIsMutable();
        blacklistRemovals_.add(value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceBlacklistRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceBlacklistRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceBlacklistRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceBlacklistRequestProto>() {
      @java.lang.Override
      public ResourceBlacklistRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
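    // The raw PARSER field above is deprecated protoc output kept for
    // compatibility; parser() and getParserForType() below are the supported
    // accessors.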

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceBlacklistRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceBlacklistRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceBlacklistRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
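  /*
   * Illustrative sketch (not protoc output): a typical round trip through the
   * generated ResourceBlacklistRequestProto API above. The node names are
   * hypothetical placeholders.
   *
   *   ResourceBlacklistRequestProto req =
   *       ResourceBlacklistRequestProto.newBuilder()
   *           .addBlacklistAdditions("badnode.example.com")    // field 1
   *           .addBlacklistRemovals("recovered.example.com")   // field 2
   *           .build();
   *   byte[] bytes = req.toByteArray();                    // serialize
   *   ResourceBlacklistRequestProto parsed =
   *       ResourceBlacklistRequestProto.parseFrom(bytes);  // reparse
   */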

  public interface ApplicationSubmissionContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationSubmissionContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    boolean hasApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder();

    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return Whether the applicationName field is set.
     */
    boolean hasApplicationName();
    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return The applicationName.
     */
    java.lang.String getApplicationName();
    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return The bytes for applicationName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes();

    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return Whether the queue field is set.
     */
    boolean hasQueue();
    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return The queue.
     */
    java.lang.String getQueue();
    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return The bytes for queue.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     * @return Whether the amContainerSpec field is set.
     */
    boolean hasAmContainerSpec();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     * @return The amContainerSpec.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec();
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder();

    /**
     * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
     * @return Whether the cancelTokensWhenComplete field is set.
     */
    boolean hasCancelTokensWhenComplete();
    /**
     * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
     * @return The cancelTokensWhenComplete.
     */
    boolean getCancelTokensWhenComplete();

    /**
     * <code>optional bool unmanaged_am = 7 [default = false];</code>
     * @return Whether the unmanagedAm field is set.
     */
    boolean hasUnmanagedAm();
    /**
     * <code>optional bool unmanaged_am = 7 [default = false];</code>
     * @return The unmanagedAm.
     */
    boolean getUnmanagedAm();

    /**
     * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
     * @return Whether the maxAppAttempts field is set.
     */
    boolean hasMaxAppAttempts();
    /**
     * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
     * @return The maxAppAttempts.
     */
    int getMaxAppAttempts();

    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return Whether the applicationType field is set.
     */
    boolean hasApplicationType();
    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return The applicationType.
     */
    java.lang.String getApplicationType();
    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return The bytes for applicationType.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes();

    /**
     * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
     * @return Whether the keepContainersAcrossApplicationAttempts field is set.
     */
    boolean hasKeepContainersAcrossApplicationAttempts();
    /**
     * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
     * @return The keepContainersAcrossApplicationAttempts.
     */
    boolean getKeepContainersAcrossApplicationAttempts();

    /**
     * <code>repeated string applicationTags = 12;</code>
     * @return A list containing the applicationTags.
     */
    java.util.List<java.lang.String>
        getApplicationTagsList();
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @return The count of applicationTags.
     */
    int getApplicationTagsCount();
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    java.lang.String getApplicationTags(int index);
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index);

    /**
     * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
     * @return Whether the attemptFailuresValidityInterval field is set.
     */
    boolean hasAttemptFailuresValidityInterval();
    /**
     * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
     * @return The attemptFailuresValidityInterval.
     */
    long getAttemptFailuresValidityInterval();

    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     * @return Whether the logAggregationContext field is set.
     */
    boolean hasLogAggregationContext();
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     * @return The logAggregationContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext();
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();

    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return Whether the nodeLabelExpression field is set.
     */
    boolean hasNodeLabelExpression();
    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return The nodeLabelExpression.
     */
    java.lang.String getNodeLabelExpression();
    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return The bytes for nodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelExpressionBytes();

    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> 
        getAmContainerResourceRequestList();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    int getAmContainerResourceRequestCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
        getAmContainerResourceRequestOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> 
        getApplicationTimeoutsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    int getApplicationTimeoutsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getApplicationSchedulingPropertiesList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    int getApplicationSchedulingPropertiesCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getApplicationSchedulingPropertiesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder(
        int index);
  }
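  /*
   * Illustrative sketch (not protoc output): building a minimal submission
   * context with the generated Builder. Setter names follow the standard
   * protoc pattern for the fields declared in the interface above; the values
   * shown are hypothetical.
   *
   *   ApplicationSubmissionContextProto ctx =
   *       ApplicationSubmissionContextProto.newBuilder()
   *           .setApplicationName("my-app")   // default "N/A" if unset
   *           .setQueue("default")            // default "default"
   *           .setMaxAppAttempts(2)           // field 8
   *           .setUnmanagedAm(false)          // field 7
   *           .build();
   */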
  /**
   * <pre>
   *&#47;/////////////////////////////////////////////////////////////////////
   * //// From client_RM_Protocol /////////////////////////////////////////
   * //////////////////////////////////////////////////////////////////////
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.ApplicationSubmissionContextProto}
   */
  public static final class ApplicationSubmissionContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationSubmissionContextProto)
      ApplicationSubmissionContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationSubmissionContextProto.newBuilder() to construct.
    private ApplicationSubmissionContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
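    // The no-arg constructor below seeds field defaults that mirror the
    // [default = ...] options in yarn_protos.proto (e.g. application_name
    // defaults to "N/A", queue to "default", applicationType to "YARN").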
    private ApplicationSubmissionContextProto() {
      applicationName_ = "N/A";
      queue_ = "default";
      cancelTokensWhenComplete_ = true;
      applicationType_ = "YARN";
      applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      attemptFailuresValidityInterval_ = -1L;
      nodeLabelExpression_ = "";
      amContainerResourceRequest_ = java.util.Collections.emptyList();
      applicationTimeouts_ = java.util.Collections.emptyList();
      applicationSchedulingProperties_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationSubmissionContextProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return Whether the applicationId field is set.
     */
    @java.lang.Override
    public boolean hasApplicationId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     * @return The applicationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
      return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
    }

    public static final int APPLICATION_NAME_FIELD_NUMBER = 2;
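    // String fields use protoc's lazy-decode pattern: the backing Object holds
    // either a java.lang.String or a ByteString, and the accessors convert and
    // cache in whichever direction is requested first.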
    @SuppressWarnings("serial")
    private volatile java.lang.Object applicationName_ = "N/A";
    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return Whether the applicationName field is set.
     */
    @java.lang.Override
    public boolean hasApplicationName() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return The applicationName.
     */
    @java.lang.Override
    public java.lang.String getApplicationName() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string application_name = 2 [default = "N/A"];</code>
     * @return The bytes for applicationName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationNameBytes() {
      java.lang.Object ref = applicationName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int QUEUE_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queue_ = "default";
    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return Whether the queue field is set.
     */
    @java.lang.Override
    public boolean hasQueue() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return The queue.
     */
    @java.lang.Override
    public java.lang.String getQueue() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queue_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queue = 3 [default = "default"];</code>
     * @return The bytes for queue.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueBytes() {
      java.lang.Object ref = queue_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queue_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int PRIORITY_FIELD_NUMBER = 4;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    public static final int AM_CONTAINER_SPEC_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto amContainerSpec_;
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     * @return Whether the amContainerSpec field is set.
     */
    @java.lang.Override
    public boolean hasAmContainerSpec() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     * @return The amContainerSpec.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec() {
      return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder() {
      return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_;
    }

    public static final int CANCEL_TOKENS_WHEN_COMPLETE_FIELD_NUMBER = 6;
    private boolean cancelTokensWhenComplete_ = true;
    /**
     * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
     * @return Whether the cancelTokensWhenComplete field is set.
     */
    @java.lang.Override
    public boolean hasCancelTokensWhenComplete() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
     * @return The cancelTokensWhenComplete.
     */
    @java.lang.Override
    public boolean getCancelTokensWhenComplete() {
      return cancelTokensWhenComplete_;
    }

    public static final int UNMANAGED_AM_FIELD_NUMBER = 7;
    private boolean unmanagedAm_ = false;
    /**
     * <code>optional bool unmanaged_am = 7 [default = false];</code>
     * @return Whether the unmanagedAm field is set.
     */
    @java.lang.Override
    public boolean hasUnmanagedAm() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional bool unmanaged_am = 7 [default = false];</code>
     * @return The unmanagedAm.
     */
    @java.lang.Override
    public boolean getUnmanagedAm() {
      return unmanagedAm_;
    }

    public static final int MAXAPPATTEMPTS_FIELD_NUMBER = 8;
    private int maxAppAttempts_ = 0;
    /**
     * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
     * @return Whether the maxAppAttempts field is set.
     */
    @java.lang.Override
    public boolean hasMaxAppAttempts() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
     * @return The maxAppAttempts.
     */
    @java.lang.Override
    public int getMaxAppAttempts() {
      return maxAppAttempts_;
    }

    public static final int RESOURCE_FIELD_NUMBER = 9;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }

    public static final int APPLICATIONTYPE_FIELD_NUMBER = 10;
    @SuppressWarnings("serial")
    private volatile java.lang.Object applicationType_ = "YARN";
    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return Whether the applicationType field is set.
     */
    @java.lang.Override
    public boolean hasApplicationType() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return The applicationType.
     */
    @java.lang.Override
    public java.lang.String getApplicationType() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationType_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string applicationType = 10 [default = "YARN"];</code>
     * @return The bytes for applicationType.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTypeBytes() {
      java.lang.Object ref = applicationType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int KEEP_CONTAINERS_ACROSS_APPLICATION_ATTEMPTS_FIELD_NUMBER = 11;
    private boolean keepContainersAcrossApplicationAttempts_ = false;
    /**
     * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
     * @return Whether the keepContainersAcrossApplicationAttempts field is set.
     */
    @java.lang.Override
    public boolean hasKeepContainersAcrossApplicationAttempts() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
     * @return The keepContainersAcrossApplicationAttempts.
     */
    @java.lang.Override
    public boolean getKeepContainersAcrossApplicationAttempts() {
      return keepContainersAcrossApplicationAttempts_;
    }

    public static final int APPLICATIONTAGS_FIELD_NUMBER = 12;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @return A list containing the applicationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getApplicationTagsList() {
      return applicationTags_;
    }
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @return The count of applicationTags.
     */
    public int getApplicationTagsCount() {
      return applicationTags_.size();
    }
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @param index The index of the element to return.
     * @return The applicationTags at the given index.
     */
    public java.lang.String getApplicationTags(int index) {
      return applicationTags_.get(index);
    }
    /**
     * <code>repeated string applicationTags = 12;</code>
     * @param index The index of the value to return.
     * @return The bytes of the applicationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getApplicationTagsBytes(int index) {
      return applicationTags_.getByteString(index);
    }

    public static final int ATTEMPT_FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER = 13;
    private long attemptFailuresValidityInterval_ = -1L;
    /**
     * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
     * @return Whether the attemptFailuresValidityInterval field is set.
     */
    @java.lang.Override
    public boolean hasAttemptFailuresValidityInterval() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
     * @return The attemptFailuresValidityInterval.
     */
    @java.lang.Override
    public long getAttemptFailuresValidityInterval() {
      return attemptFailuresValidityInterval_;
    }

    public static final int LOG_AGGREGATION_CONTEXT_FIELD_NUMBER = 14;
    private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     * @return Whether the logAggregationContext field is set.
     */
    @java.lang.Override
    public boolean hasLogAggregationContext() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     * @return The logAggregationContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
      return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
    }
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
      return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
    }

    public static final int RESERVATION_ID_FIELD_NUMBER = 15;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00002000) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }

    public static final int NODE_LABEL_EXPRESSION_FIELD_NUMBER = 16;
    @SuppressWarnings("serial")
    private volatile java.lang.Object nodeLabelExpression_ = "";
    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return Whether the nodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasNodeLabelExpression() {
      return ((bitField0_ & 0x00004000) != 0);
    }
    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return The nodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getNodeLabelExpression() {
      java.lang.Object ref = nodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          nodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string node_label_expression = 16;</code>
     * @return The bytes for nodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNodeLabelExpressionBytes() {
      java.lang.Object ref = nodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        nodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int AM_CONTAINER_RESOURCE_REQUEST_FIELD_NUMBER = 17;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> amContainerResourceRequest_;
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> getAmContainerResourceRequestList() {
      return amContainerResourceRequest_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
        getAmContainerResourceRequestOrBuilderList() {
      return amContainerResourceRequest_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    @java.lang.Override
    public int getAmContainerResourceRequestCount() {
      return amContainerResourceRequest_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index) {
      return amContainerResourceRequest_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder(
        int index) {
      return amContainerResourceRequest_.get(index);
    }

    public static final int APPLICATION_TIMEOUTS_FIELD_NUMBER = 18;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> applicationTimeouts_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> getApplicationTimeoutsList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> 
        getApplicationTimeoutsOrBuilderList() {
      return applicationTimeouts_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    @java.lang.Override
    public int getApplicationTimeoutsCount() {
      return applicationTimeouts_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index) {
      return applicationTimeouts_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
        int index) {
      return applicationTimeouts_.get(index);
    }

    public static final int APPLICATION_SCHEDULING_PROPERTIES_FIELD_NUMBER = 19;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> applicationSchedulingProperties_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getApplicationSchedulingPropertiesList() {
      return applicationSchedulingProperties_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getApplicationSchedulingPropertiesOrBuilderList() {
      return applicationSchedulingProperties_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    @java.lang.Override
    public int getApplicationSchedulingPropertiesCount() {
      return applicationSchedulingProperties_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index) {
      return applicationSchedulingProperties_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder(
        int index) {
      return applicationSchedulingProperties_.get(index);
    }

    private byte memoizedIsInitialized = -1;
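    // -1 = not yet computed, 0 = known uninitialized, 1 = known initialized.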
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAmContainerResourceRequestCount(); i++) {
        if (!getAmContainerResourceRequest(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
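      // Optional fields are emitted only when their presence bit in bitField0_
      // is set; repeated fields are always written, in field-number order.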
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, queue_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(5, getAmContainerSpec());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeBool(6, cancelTokensWhenComplete_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeBool(7, unmanagedAm_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt32(8, maxAppAttempts_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeMessage(9, getResource());
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 10, applicationType_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeBool(11, keepContainersAcrossApplicationAttempts_);
      }
      for (int i = 0; i < applicationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 12, applicationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeInt64(13, attemptFailuresValidityInterval_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        output.writeMessage(14, getLogAggregationContext());
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        output.writeMessage(15, getReservationId());
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 16, nodeLabelExpression_);
      }
      for (int i = 0; i < amContainerResourceRequest_.size(); i++) {
        output.writeMessage(17, amContainerResourceRequest_.get(i));
      }
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        output.writeMessage(18, applicationTimeouts_.get(i));
      }
      for (int i = 0; i < applicationSchedulingProperties_.size(); i++) {
        output.writeMessage(19, applicationSchedulingProperties_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getApplicationId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, applicationName_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, queue_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, getPriority());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getAmContainerSpec());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(6, cancelTokensWhenComplete_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(7, unmanagedAm_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(8, maxAppAttempts_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(9, getResource());
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(10, applicationType_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(11, keepContainersAcrossApplicationAttempts_);
      }
      {
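        // application_tags (field 12): the tag byte (12 << 3 | 2 = 98) fits in
        // a single varint byte, hence the "1 *" per-element overhead below.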
        int dataSize = 0;
        for (int i = 0; i < applicationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(applicationTags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getApplicationTagsList().size();
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(13, attemptFailuresValidityInterval_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(14, getLogAggregationContext());
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(15, getReservationId());
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(16, nodeLabelExpression_);
      }
      for (int i = 0; i < amContainerResourceRequest_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(17, amContainerResourceRequest_.get(i));
      }
      for (int i = 0; i < applicationTimeouts_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(18, applicationTimeouts_.get(i));
      }
      for (int i = 0; i < applicationSchedulingProperties_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(19, applicationSchedulingProperties_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto) obj;
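      // Field presence must agree before values are compared; values are only
      // checked when both messages have the field set. Unknown fields count too.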

      if (hasApplicationId() != other.hasApplicationId()) return false;
      if (hasApplicationId()) {
        if (!getApplicationId()
            .equals(other.getApplicationId())) return false;
      }
      if (hasApplicationName() != other.hasApplicationName()) return false;
      if (hasApplicationName()) {
        if (!getApplicationName()
            .equals(other.getApplicationName())) return false;
      }
      if (hasQueue() != other.hasQueue()) return false;
      if (hasQueue()) {
        if (!getQueue()
            .equals(other.getQueue())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (hasAmContainerSpec() != other.hasAmContainerSpec()) return false;
      if (hasAmContainerSpec()) {
        if (!getAmContainerSpec()
            .equals(other.getAmContainerSpec())) return false;
      }
      if (hasCancelTokensWhenComplete() != other.hasCancelTokensWhenComplete()) return false;
      if (hasCancelTokensWhenComplete()) {
        if (getCancelTokensWhenComplete()
            != other.getCancelTokensWhenComplete()) return false;
      }
      if (hasUnmanagedAm() != other.hasUnmanagedAm()) return false;
      if (hasUnmanagedAm()) {
        if (getUnmanagedAm()
            != other.getUnmanagedAm()) return false;
      }
      if (hasMaxAppAttempts() != other.hasMaxAppAttempts()) return false;
      if (hasMaxAppAttempts()) {
        if (getMaxAppAttempts()
            != other.getMaxAppAttempts()) return false;
      }
      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasApplicationType() != other.hasApplicationType()) return false;
      if (hasApplicationType()) {
        if (!getApplicationType()
            .equals(other.getApplicationType())) return false;
      }
      if (hasKeepContainersAcrossApplicationAttempts() != other.hasKeepContainersAcrossApplicationAttempts()) return false;
      if (hasKeepContainersAcrossApplicationAttempts()) {
        if (getKeepContainersAcrossApplicationAttempts()
            != other.getKeepContainersAcrossApplicationAttempts()) return false;
      }
      if (!getApplicationTagsList()
          .equals(other.getApplicationTagsList())) return false;
      if (hasAttemptFailuresValidityInterval() != other.hasAttemptFailuresValidityInterval()) return false;
      if (hasAttemptFailuresValidityInterval()) {
        if (getAttemptFailuresValidityInterval()
            != other.getAttemptFailuresValidityInterval()) return false;
      }
      if (hasLogAggregationContext() != other.hasLogAggregationContext()) return false;
      if (hasLogAggregationContext()) {
        if (!getLogAggregationContext()
            .equals(other.getLogAggregationContext())) return false;
      }
      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (hasNodeLabelExpression() != other.hasNodeLabelExpression()) return false;
      if (hasNodeLabelExpression()) {
        if (!getNodeLabelExpression()
            .equals(other.getNodeLabelExpression())) return false;
      }
      if (!getAmContainerResourceRequestList()
          .equals(other.getAmContainerResourceRequestList())) return false;
      if (!getApplicationTimeoutsList()
          .equals(other.getApplicationTimeoutsList())) return false;
      if (!getApplicationSchedulingPropertiesList()
          .equals(other.getApplicationSchedulingPropertiesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
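      // Each present field mixes in its field number, then its value hash, so
      // messages that are equals() (including presence) hash identically.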
      if (hasApplicationId()) {
        hash = (37 * hash) + APPLICATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationId().hashCode();
      }
      if (hasApplicationName()) {
        hash = (37 * hash) + APPLICATION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationName().hashCode();
      }
      if (hasQueue()) {
        hash = (37 * hash) + QUEUE_FIELD_NUMBER;
        hash = (53 * hash) + getQueue().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      if (hasAmContainerSpec()) {
        hash = (37 * hash) + AM_CONTAINER_SPEC_FIELD_NUMBER;
        hash = (53 * hash) + getAmContainerSpec().hashCode();
      }
      if (hasCancelTokensWhenComplete()) {
        hash = (37 * hash) + CANCEL_TOKENS_WHEN_COMPLETE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getCancelTokensWhenComplete());
      }
      if (hasUnmanagedAm()) {
        hash = (37 * hash) + UNMANAGED_AM_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getUnmanagedAm());
      }
      if (hasMaxAppAttempts()) {
        hash = (37 * hash) + MAXAPPATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxAppAttempts();
      }
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasApplicationType()) {
        hash = (37 * hash) + APPLICATIONTYPE_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationType().hashCode();
      }
      if (hasKeepContainersAcrossApplicationAttempts()) {
        hash = (37 * hash) + KEEP_CONTAINERS_ACROSS_APPLICATION_ATTEMPTS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getKeepContainersAcrossApplicationAttempts());
      }
      if (getApplicationTagsCount() > 0) {
        hash = (37 * hash) + APPLICATIONTAGS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTagsList().hashCode();
      }
      if (hasAttemptFailuresValidityInterval()) {
        hash = (37 * hash) + ATTEMPT_FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAttemptFailuresValidityInterval());
      }
      if (hasLogAggregationContext()) {
        hash = (37 * hash) + LOG_AGGREGATION_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getLogAggregationContext().hashCode();
      }
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      if (hasNodeLabelExpression()) {
        hash = (37 * hash) + NODE_LABEL_EXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getNodeLabelExpression().hashCode();
      }
      if (getAmContainerResourceRequestCount() > 0) {
        hash = (37 * hash) + AM_CONTAINER_RESOURCE_REQUEST_FIELD_NUMBER;
        hash = (53 * hash) + getAmContainerResourceRequestList().hashCode();
      }
      if (getApplicationTimeoutsCount() > 0) {
        hash = (37 * hash) + APPLICATION_TIMEOUTS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationTimeoutsList().hashCode();
      }
      if (getApplicationSchedulingPropertiesCount() > 0) {
        hash = (37 * hash) + APPLICATION_SCHEDULING_PROPERTIES_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationSchedulingPropertiesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
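    // Illustrative round trip (not part of the generated API surface); "ctx"
    // is a hypothetical local variable of this message type:
    //   byte[] bytes = ctx.toByteArray();
    //   ApplicationSubmissionContextProto copy =
    //       ApplicationSubmissionContextProto.parseFrom(bytes);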

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
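    // Illustrative builder usage (hypothetical values, not generated code):
    //   ApplicationSubmissionContextProto ctx =
    //       ApplicationSubmissionContextProto.newBuilder()
    //           .setApplicationName("my-app")   // hypothetical name
    //           .setQueue("default")
    //           .setMaxAppAttempts(2)
    //           .build();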

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     *&#47;/////////////////////////////////////////////////////////////////////
     * //// From client_RM_Protocol /////////////////////////////////////////
     * //////////////////////////////////////////////////////////////////////
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.ApplicationSubmissionContextProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationSubmissionContextProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getApplicationIdFieldBuilder();
          getPriorityFieldBuilder();
          getAmContainerSpecFieldBuilder();
          getResourceFieldBuilder();
          getLogAggregationContextFieldBuilder();
          getReservationIdFieldBuilder();
          getAmContainerResourceRequestFieldBuilder();
          getApplicationTimeoutsFieldBuilder();
          getApplicationSchedulingPropertiesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
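        // Reset every field to its .proto default. Note the non-trivial
        // defaults: application_name "N/A", queue "default",
        // cancel_tokens_when_complete true, applicationType "YARN",
        // attempt_failures_validity_interval -1.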
        bitField0_ = 0;
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        applicationName_ = "N/A";
        queue_ = "default";
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        amContainerSpec_ = null;
        if (amContainerSpecBuilder_ != null) {
          amContainerSpecBuilder_.dispose();
          amContainerSpecBuilder_ = null;
        }
        cancelTokensWhenComplete_ = true;
        unmanagedAm_ = false;
        maxAppAttempts_ = 0;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        applicationType_ = "YARN";
        keepContainersAcrossApplicationAttempts_ = false;
        applicationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        attemptFailuresValidityInterval_ = -1L;
        logAggregationContext_ = null;
        if (logAggregationContextBuilder_ != null) {
          logAggregationContextBuilder_.dispose();
          logAggregationContextBuilder_ = null;
        }
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        nodeLabelExpression_ = "";
        if (amContainerResourceRequestBuilder_ == null) {
          amContainerResourceRequest_ = java.util.Collections.emptyList();
        } else {
          amContainerResourceRequest_ = null;
          amContainerResourceRequestBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00010000);
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
        } else {
          applicationTimeouts_ = null;
          applicationTimeoutsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00020000);
        if (applicationSchedulingPropertiesBuilder_ == null) {
          applicationSchedulingProperties_ = java.util.Collections.emptyList();
        } else {
          applicationSchedulingProperties_ = null;
          applicationSchedulingPropertiesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00040000);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
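      // buildPartial() below is identical to build() except that it skips the
      // isInitialized() check, so it can return a message whose nested
      // required fields are still unset.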

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result) {
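        // For each repeated field: with no nested builder, the accumulated list
        // is frozen (wrapped unmodifiable) and handed to the message; with a
        // builder, the builder produces the final immutable list.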
        if (amContainerResourceRequestBuilder_ == null) {
          if (((bitField0_ & 0x00010000) != 0)) {
            amContainerResourceRequest_ = java.util.Collections.unmodifiableList(amContainerResourceRequest_);
            bitField0_ = (bitField0_ & ~0x00010000);
          }
          result.amContainerResourceRequest_ = amContainerResourceRequest_;
        } else {
          result.amContainerResourceRequest_ = amContainerResourceRequestBuilder_.build();
        }
        if (applicationTimeoutsBuilder_ == null) {
          if (((bitField0_ & 0x00020000) != 0)) {
            applicationTimeouts_ = java.util.Collections.unmodifiableList(applicationTimeouts_);
            bitField0_ = (bitField0_ & ~0x00020000);
          }
          result.applicationTimeouts_ = applicationTimeouts_;
        } else {
          result.applicationTimeouts_ = applicationTimeoutsBuilder_.build();
        }
        if (applicationSchedulingPropertiesBuilder_ == null) {
          if (((bitField0_ & 0x00040000) != 0)) {
            applicationSchedulingProperties_ = java.util.Collections.unmodifiableList(applicationSchedulingProperties_);
            bitField0_ = (bitField0_ & ~0x00040000);
          }
          result.applicationSchedulingProperties_ = applicationSchedulingProperties_;
        } else {
          result.applicationSchedulingProperties_ = applicationSchedulingPropertiesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
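        // Builder bits are remapped to message has-bits here. application_tags
        // uses builder bit 0x00000800 but gets no message has-bit (repeated
        // fields have no presence), so every later field shifts down one bit,
        // e.g. builder 0x00001000 -> message 0x00000800.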
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationId_ = applicationIdBuilder_ == null
              ? applicationId_
              : applicationIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.applicationName_ = applicationName_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.queue_ = queue_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.amContainerSpec_ = amContainerSpecBuilder_ == null
              ? amContainerSpec_
              : amContainerSpecBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.cancelTokensWhenComplete_ = cancelTokensWhenComplete_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.unmanagedAm_ = unmanagedAm_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.maxAppAttempts_ = maxAppAttempts_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.applicationType_ = applicationType_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.keepContainersAcrossApplicationAttempts_ = keepContainersAcrossApplicationAttempts_;
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          applicationTags_.makeImmutable();
          result.applicationTags_ = applicationTags_;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.attemptFailuresValidityInterval_ = attemptFailuresValidityInterval_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00002000) != 0)) {
          result.logAggregationContext_ = logAggregationContextBuilder_ == null
              ? logAggregationContext_
              : logAggregationContextBuilder_.build();
          to_bitField0_ |= 0x00001000;
        }
        if (((from_bitField0_ & 0x00004000) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00002000;
        }
        if (((from_bitField0_ & 0x00008000) != 0)) {
          result.nodeLabelExpression_ = nodeLabelExpression_;
          to_bitField0_ |= 0x00004000;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto.getDefaultInstance()) return this;
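        // Merge semantics: set scalar and string fields overwrite this
        // builder's values, set sub-messages are merged recursively, and
        // repeated fields are concatenated.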
        if (other.hasApplicationId()) {
          mergeApplicationId(other.getApplicationId());
        }
        if (other.hasApplicationName()) {
          applicationName_ = other.applicationName_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasQueue()) {
          queue_ = other.queue_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        if (other.hasAmContainerSpec()) {
          mergeAmContainerSpec(other.getAmContainerSpec());
        }
        if (other.hasCancelTokensWhenComplete()) {
          setCancelTokensWhenComplete(other.getCancelTokensWhenComplete());
        }
        if (other.hasUnmanagedAm()) {
          setUnmanagedAm(other.getUnmanagedAm());
        }
        if (other.hasMaxAppAttempts()) {
          setMaxAppAttempts(other.getMaxAppAttempts());
        }
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasApplicationType()) {
          applicationType_ = other.applicationType_;
          bitField0_ |= 0x00000200;
          onChanged();
        }
        if (other.hasKeepContainersAcrossApplicationAttempts()) {
          setKeepContainersAcrossApplicationAttempts(other.getKeepContainersAcrossApplicationAttempts());
        }
        if (!other.applicationTags_.isEmpty()) {
          if (applicationTags_.isEmpty()) {
            applicationTags_ = other.applicationTags_;
            bitField0_ |= 0x00000800;
          } else {
            ensureApplicationTagsIsMutable();
            applicationTags_.addAll(other.applicationTags_);
          }
          onChanged();
        }
        if (other.hasAttemptFailuresValidityInterval()) {
          setAttemptFailuresValidityInterval(other.getAttemptFailuresValidityInterval());
        }
        if (other.hasLogAggregationContext()) {
          mergeLogAggregationContext(other.getLogAggregationContext());
        }
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        if (other.hasNodeLabelExpression()) {
          nodeLabelExpression_ = other.nodeLabelExpression_;
          bitField0_ |= 0x00008000;
          onChanged();
        }
        if (amContainerResourceRequestBuilder_ == null) {
          if (!other.amContainerResourceRequest_.isEmpty()) {
            if (amContainerResourceRequest_.isEmpty()) {
              amContainerResourceRequest_ = other.amContainerResourceRequest_;
              bitField0_ = (bitField0_ & ~0x00010000);
            } else {
              ensureAmContainerResourceRequestIsMutable();
              amContainerResourceRequest_.addAll(other.amContainerResourceRequest_);
            }
            onChanged();
          }
        } else {
          if (!other.amContainerResourceRequest_.isEmpty()) {
            if (amContainerResourceRequestBuilder_.isEmpty()) {
              amContainerResourceRequestBuilder_.dispose();
              amContainerResourceRequestBuilder_ = null;
              amContainerResourceRequest_ = other.amContainerResourceRequest_;
              bitField0_ = (bitField0_ & ~0x00010000);
              amContainerResourceRequestBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAmContainerResourceRequestFieldBuilder() : null;
            } else {
              amContainerResourceRequestBuilder_.addAllMessages(other.amContainerResourceRequest_);
            }
          }
        }
        if (applicationTimeoutsBuilder_ == null) {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeouts_.isEmpty()) {
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00020000);
            } else {
              ensureApplicationTimeoutsIsMutable();
              applicationTimeouts_.addAll(other.applicationTimeouts_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationTimeouts_.isEmpty()) {
            if (applicationTimeoutsBuilder_.isEmpty()) {
              applicationTimeoutsBuilder_.dispose();
              applicationTimeoutsBuilder_ = null;
              applicationTimeouts_ = other.applicationTimeouts_;
              bitField0_ = (bitField0_ & ~0x00020000);
              applicationTimeoutsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationTimeoutsFieldBuilder() : null;
            } else {
              applicationTimeoutsBuilder_.addAllMessages(other.applicationTimeouts_);
            }
          }
        }
        if (applicationSchedulingPropertiesBuilder_ == null) {
          if (!other.applicationSchedulingProperties_.isEmpty()) {
            if (applicationSchedulingProperties_.isEmpty()) {
              applicationSchedulingProperties_ = other.applicationSchedulingProperties_;
              bitField0_ = (bitField0_ & ~0x00040000);
            } else {
              ensureApplicationSchedulingPropertiesIsMutable();
              applicationSchedulingProperties_.addAll(other.applicationSchedulingProperties_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationSchedulingProperties_.isEmpty()) {
            if (applicationSchedulingPropertiesBuilder_.isEmpty()) {
              applicationSchedulingPropertiesBuilder_.dispose();
              applicationSchedulingPropertiesBuilder_ = null;
              applicationSchedulingProperties_ = other.applicationSchedulingProperties_;
              bitField0_ = (bitField0_ & ~0x00040000);
              applicationSchedulingPropertiesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationSchedulingPropertiesFieldBuilder() : null;
            } else {
              applicationSchedulingPropertiesBuilder_.addAllMessages(other.applicationSchedulingProperties_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getAmContainerResourceRequestCount(); i++) {
          if (!getAmContainerResourceRequest(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
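            // tag = (field_number << 3) | wire_type; 0 means end of input.
            // e.g. 10 = field 1, length-delimited; 48 = field 6, varint.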
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getApplicationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                applicationName_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                queue_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getAmContainerSpecFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                cancelTokensWhenComplete_ = input.readBool();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                unmanagedAm_ = input.readBool();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 64: {
                maxAppAttempts_ = input.readInt32();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              case 74: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000100;
                break;
              } // case 74
              case 82: {
                applicationType_ = input.readBytes();
                bitField0_ |= 0x00000200;
                break;
              } // case 82
              case 88: {
                keepContainersAcrossApplicationAttempts_ = input.readBool();
                bitField0_ |= 0x00000400;
                break;
              } // case 88
              case 98: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureApplicationTagsIsMutable();
                applicationTags_.add(bs);
                break;
              } // case 98
              case 104: {
                attemptFailuresValidityInterval_ = input.readInt64();
                bitField0_ |= 0x00001000;
                break;
              } // case 104
              case 114: {
                input.readMessage(
                    getLogAggregationContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00002000;
                break;
              } // case 114
              case 122: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00004000;
                break;
              } // case 122
              case 130: {
                nodeLabelExpression_ = input.readBytes();
                bitField0_ |= 0x00008000;
                break;
              } // case 130
              case 138: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.PARSER,
                        extensionRegistry);
                if (amContainerResourceRequestBuilder_ == null) {
                  ensureAmContainerResourceRequestIsMutable();
                  amContainerResourceRequest_.add(m);
                } else {
                  amContainerResourceRequestBuilder_.addMessage(m);
                }
                break;
              } // case 138
              case 146: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.PARSER,
                        extensionRegistry);
                if (applicationTimeoutsBuilder_ == null) {
                  ensureApplicationTimeoutsIsMutable();
                  applicationTimeouts_.add(m);
                } else {
                  applicationTimeoutsBuilder_.addMessage(m);
                }
                break;
              } // case 146
              case 154: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (applicationSchedulingPropertiesBuilder_ == null) {
                  ensureApplicationSchedulingPropertiesIsMutable();
                  applicationSchedulingProperties_.add(m);
                } else {
                  applicationSchedulingPropertiesBuilder_.addMessage(m);
                }
                break;
              } // case 154
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto applicationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> applicationIdBuilder_;
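      // The value lives either in applicationId_ or, once
      // getApplicationIdFieldBuilder() is first called, in the
      // SingleFieldBuilderV3 (which then nulls out the raw field).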
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return Whether the applicationId field is set.
       */
      public boolean hasApplicationId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       * @return The applicationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getApplicationId() {
        if (applicationIdBuilder_ == null) {
          return applicationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        } else {
          return applicationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          applicationId_ = value;
        } else {
          applicationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder setApplicationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (applicationIdBuilder_ == null) {
          applicationId_ = builderForValue.build();
        } else {
          applicationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder mergeApplicationId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (applicationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            applicationId_ != null &&
            applicationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getApplicationIdBuilder().mergeFrom(value);
          } else {
            applicationId_ = value;
          }
        } else {
          applicationIdBuilder_.mergeFrom(value);
        }
        if (applicationId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public Builder clearApplicationId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationId_ = null;
        if (applicationIdBuilder_ != null) {
          applicationIdBuilder_.dispose();
          applicationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getApplicationIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getApplicationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getApplicationIdOrBuilder() {
        if (applicationIdBuilder_ != null) {
          return applicationIdBuilder_.getMessageOrBuilder();
        } else {
          return applicationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : applicationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto application_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getApplicationIdFieldBuilder() {
        if (applicationIdBuilder_ == null) {
          applicationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getApplicationId(),
                  getParentForChildren(),
                  isClean());
          applicationId_ = null;
        }
        return applicationIdBuilder_;
      }

      private java.lang.Object applicationName_ = "N/A";
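      // Stored as either a String or a ByteString; each getter caches the
      // converted form back into this field (the UTF-8 decode is cached only
      // when the bytes are valid UTF-8).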
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @return Whether the applicationName field is set.
       */
      public boolean hasApplicationName() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @return The applicationName.
       */
      public java.lang.String getApplicationName() {
        java.lang.Object ref = applicationName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @return The bytes for applicationName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationNameBytes() {
        java.lang.Object ref = applicationName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @param value The applicationName to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        applicationName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationName() {
        applicationName_ = getDefaultInstance().getApplicationName();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string application_name = 2 [default = "N/A"];</code>
       * @param value The bytes for applicationName to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        applicationName_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object queue_ = "default";
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @return Whether the queue field is set.
       */
      public boolean hasQueue() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @return The queue.
       */
      public java.lang.String getQueue() {
        java.lang.Object ref = queue_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queue_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @return The bytes for queue.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueBytes() {
        java.lang.Object ref = queue_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queue_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @param value The queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @return This builder for chaining.
       */
      public Builder clearQueue() {
        queue_ = getDefaultInstance().getQueue();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queue = 3 [default = "default"];</code>
       * @param value The bytes for queue to set.
       * @return This builder for chaining.
       */
      public Builder setQueueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queue_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000008;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000008);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000008;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 4;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }
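
      // Editor's note (not generated code): the members above implement
      // protobuf's lazy single-field-builder pattern. The message is held in
      // priority_ until the first call to getPriorityBuilder(), which routes
      // through getPriorityFieldBuilder(), migrates the value into
      // priorityBuilder_, and nulls the plain field; from then on every read
      // and write goes through the builder. Caller-side sketch (PriorityProto's
      // int32 priority field assumed from the .proto):
      //
      //   b.setPriority(PriorityProto.newBuilder().setPriority(1));
      //   b.getPriorityBuilder().setPriority(2);  // in-place edit via nested builder
      //   assert b.getPriority().getPriority() == 2;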

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto amContainerSpec_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> amContainerSpecBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       * @return Whether the amContainerSpec field is set.
       */
      public boolean hasAmContainerSpec() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       * @return The amContainerSpec.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getAmContainerSpec() {
        if (amContainerSpecBuilder_ == null) {
          return amContainerSpec_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_;
        } else {
          return amContainerSpecBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public Builder setAmContainerSpec(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (amContainerSpecBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          amContainerSpec_ = value;
        } else {
          amContainerSpecBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public Builder setAmContainerSpec(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder builderForValue) {
        if (amContainerSpecBuilder_ == null) {
          amContainerSpec_ = builderForValue.build();
        } else {
          amContainerSpecBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public Builder mergeAmContainerSpec(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto value) {
        if (amContainerSpecBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            amContainerSpec_ != null &&
            amContainerSpec_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) {
            getAmContainerSpecBuilder().mergeFrom(value);
          } else {
            amContainerSpec_ = value;
          }
        } else {
          amContainerSpecBuilder_.mergeFrom(value);
        }
        if (amContainerSpec_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public Builder clearAmContainerSpec() {
        bitField0_ = (bitField0_ & ~0x00000010);
        amContainerSpec_ = null;
        if (amContainerSpecBuilder_ != null) {
          amContainerSpecBuilder_.dispose();
          amContainerSpecBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder getAmContainerSpecBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getAmContainerSpecFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder getAmContainerSpecOrBuilder() {
        if (amContainerSpecBuilder_ != null) {
          return amContainerSpecBuilder_.getMessageOrBuilder();
        } else {
          return amContainerSpec_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance() : amContainerSpec_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerLaunchContextProto am_container_spec = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder> 
          getAmContainerSpecFieldBuilder() {
        if (amContainerSpecBuilder_ == null) {
          amContainerSpecBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder>(
                  getAmContainerSpec(),
                  getParentForChildren(),
                  isClean());
          amContainerSpec_ = null;
        }
        return amContainerSpecBuilder_;
      }
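
      // Editor's note (not generated code): merge semantics sketch. When the
      // field already holds a non-default message, mergeAmContainerSpec(v)
      // does a field-by-field protobuf merge into the existing value (repeated
      // sub-fields are concatenated); otherwise it simply replaces the field.
      //
      //   b.setAmContainerSpec(specWithCommandOnly);   // hypothetical values
      //   b.mergeAmContainerSpec(specWithEnvOnly);
      //   // the result now carries both the command and the environment entries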

      private boolean cancelTokensWhenComplete_ = true;
      /**
       * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
       * @return Whether the cancelTokensWhenComplete field is set.
       */
      @java.lang.Override
      public boolean hasCancelTokensWhenComplete() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
       * @return The cancelTokensWhenComplete.
       */
      @java.lang.Override
      public boolean getCancelTokensWhenComplete() {
        return cancelTokensWhenComplete_;
      }
      /**
       * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
       * @param value The cancelTokensWhenComplete to set.
       * @return This builder for chaining.
       */
      public Builder setCancelTokensWhenComplete(boolean value) {
        cancelTokensWhenComplete_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool cancel_tokens_when_complete = 6 [default = true];</code>
       * @return This builder for chaining.
       */
      public Builder clearCancelTokensWhenComplete() {
        bitField0_ = (bitField0_ & ~0x00000020);
        cancelTokensWhenComplete_ = true;
        onChanged();
        return this;
      }
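
      // Editor's note (not generated code): because this proto2 bool defaults
      // to true, "never set" and "explicitly set to true" both read back as
      // true; callers that care about the difference must check the hazzer:
      //
      //   boolean explicit = b.hasCancelTokensWhenComplete();  // presence
      //   boolean value    = b.getCancelTokensWhenComplete();  // true until set to false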

      private boolean unmanagedAm_;
      /**
       * <code>optional bool unmanaged_am = 7 [default = false];</code>
       * @return Whether the unmanagedAm field is set.
       */
      @java.lang.Override
      public boolean hasUnmanagedAm() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional bool unmanaged_am = 7 [default = false];</code>
       * @return The unmanagedAm.
       */
      @java.lang.Override
      public boolean getUnmanagedAm() {
        return unmanagedAm_;
      }
      /**
       * <code>optional bool unmanaged_am = 7 [default = false];</code>
       * @param value The unmanagedAm to set.
       * @return This builder for chaining.
       */
      public Builder setUnmanagedAm(boolean value) {
        unmanagedAm_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool unmanaged_am = 7 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearUnmanagedAm() {
        bitField0_ = (bitField0_ & ~0x00000040);
        unmanagedAm_ = false;
        onChanged();
        return this;
      }

      private int maxAppAttempts_;
      /**
       * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
       * @return Whether the maxAppAttempts field is set.
       */
      @java.lang.Override
      public boolean hasMaxAppAttempts() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
       * @return The maxAppAttempts.
       */
      @java.lang.Override
      public int getMaxAppAttempts() {
        return maxAppAttempts_;
      }
      /**
       * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
       * @param value The maxAppAttempts to set.
       * @return This builder for chaining.
       */
      public Builder setMaxAppAttempts(int value) {
        maxAppAttempts_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 maxAppAttempts = 8 [default = 0];</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxAppAttempts() {
        bitField0_ = (bitField0_ & ~0x00000080);
        maxAppAttempts_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000100) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000100;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000100);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000100;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 9;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private java.lang.Object applicationType_ = "YARN";
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @return Whether the applicationType field is set.
       */
      public boolean hasApplicationType() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @return The applicationType.
       */
      public java.lang.String getApplicationType() {
        java.lang.Object ref = applicationType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            applicationType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @return The bytes for applicationType.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTypeBytes() {
        java.lang.Object ref = applicationType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          applicationType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @param value The applicationType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationType(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        applicationType_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationType() {
        applicationType_ = getDefaultInstance().getApplicationType();
        bitField0_ = (bitField0_ & ~0x00000200);
        onChanged();
        return this;
      }
      /**
       * <code>optional string applicationType = 10 [default = "YARN"];</code>
       * @param value The bytes for applicationType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        applicationType_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
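
      // Editor's note (not generated code): applicationType_ is declared as
      // java.lang.Object because proto2 strings are cached in two forms. A
      // value that arrives as bytes is kept as a ByteString; the first
      // getApplicationType() call decodes it and, if the bytes are valid
      // UTF-8, caches the decoded String back into the field so later reads
      // skip the decode. Note that setApplicationTypeBytes(...) performs no
      // UTF-8 validation, mirroring the wire format.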

      private boolean keepContainersAcrossApplicationAttempts_;
      /**
       * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
       * @return Whether the keepContainersAcrossApplicationAttempts field is set.
       */
      @java.lang.Override
      public boolean hasKeepContainersAcrossApplicationAttempts() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
       * @return The keepContainersAcrossApplicationAttempts.
       */
      @java.lang.Override
      public boolean getKeepContainersAcrossApplicationAttempts() {
        return keepContainersAcrossApplicationAttempts_;
      }
      /**
       * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
       * @param value The keepContainersAcrossApplicationAttempts to set.
       * @return This builder for chaining.
       */
      public Builder setKeepContainersAcrossApplicationAttempts(boolean value) {
        keepContainersAcrossApplicationAttempts_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool keep_containers_across_application_attempts = 11 [default = false];</code>
       * @return This builder for chaining.
       */
      public Builder clearKeepContainersAcrossApplicationAttempts() {
        bitField0_ = (bitField0_ & ~0x00000400);
        keepContainersAcrossApplicationAttempts_ = false;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureApplicationTagsIsMutable() {
        if (!applicationTags_.isModifiable()) {
          applicationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(applicationTags_);
        }
        bitField0_ |= 0x00000800;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @return A list containing the applicationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getApplicationTagsList() {
        applicationTags_.makeImmutable();
        return applicationTags_;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @return The count of applicationTags.
       */
      public int getApplicationTagsCount() {
        return applicationTags_.size();
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param index The index of the element to return.
       * @return The applicationTags at the given index.
       */
      public java.lang.String getApplicationTags(int index) {
        return applicationTags_.get(index);
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param index The index of the value to return.
       * @return The bytes of the applicationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getApplicationTagsBytes(int index) {
        return applicationTags_.getByteString(index);
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param index The index to set the value at.
       * @param value The applicationTags to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.set(index, value);
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param value The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param values The applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllApplicationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureApplicationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, applicationTags_);
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTags() {
        applicationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000800);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string applicationTags = 12;</code>
       * @param value The bytes of the applicationTags to add.
       * @return This builder for chaining.
       */
      public Builder addApplicationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureApplicationTagsIsMutable();
        applicationTags_.add(value);
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
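
      // Editor's note (not generated code): usage sketch for this repeated
      // string field, which is backed by a copy-on-write LazyStringArrayList:
      //
      //   b.addApplicationTags("etl")                        // hypothetical tag values
      //    .addAllApplicationTags(java.util.Arrays.asList("nightly", "v2"));
      //   b.getApplicationTagsList();   // marks the backing list immutable and returns it
      //   b.clearApplicationTags();     // swaps back to the shared empty list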

      private long attemptFailuresValidityInterval_ = -1L;
      /**
       * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
       * @return Whether the attemptFailuresValidityInterval field is set.
       */
      @java.lang.Override
      public boolean hasAttemptFailuresValidityInterval() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
       * @return The attemptFailuresValidityInterval.
       */
      @java.lang.Override
      public long getAttemptFailuresValidityInterval() {
        return attemptFailuresValidityInterval_;
      }
      /**
       * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
       * @param value The attemptFailuresValidityInterval to set.
       * @return This builder for chaining.
       */
      public Builder setAttemptFailuresValidityInterval(long value) {
        attemptFailuresValidityInterval_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 attempt_failures_validity_interval = 13 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearAttemptFailuresValidityInterval() {
        bitField0_ = (bitField0_ & ~0x00001000);
        attemptFailuresValidityInterval_ = -1L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> logAggregationContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       * @return Whether the logAggregationContext field is set.
       */
      public boolean hasLogAggregationContext() {
        return ((bitField0_ & 0x00002000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       * @return The logAggregationContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
        if (logAggregationContextBuilder_ == null) {
          return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
        } else {
          return logAggregationContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public Builder setLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
        if (logAggregationContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          logAggregationContext_ = value;
        } else {
          logAggregationContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public Builder setLogAggregationContext(
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder builderForValue) {
        if (logAggregationContextBuilder_ == null) {
          logAggregationContext_ = builderForValue.build();
        } else {
          logAggregationContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public Builder mergeLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
        if (logAggregationContextBuilder_ == null) {
          if (((bitField0_ & 0x00002000) != 0) &&
            logAggregationContext_ != null &&
            logAggregationContext_ != org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) {
            getLogAggregationContextBuilder().mergeFrom(value);
          } else {
            logAggregationContext_ = value;
          }
        } else {
          logAggregationContextBuilder_.mergeFrom(value);
        }
        if (logAggregationContext_ != null) {
          bitField0_ |= 0x00002000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public Builder clearLogAggregationContext() {
        bitField0_ = (bitField0_ & ~0x00002000);
        logAggregationContext_ = null;
        if (logAggregationContextBuilder_ != null) {
          logAggregationContextBuilder_.dispose();
          logAggregationContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder getLogAggregationContextBuilder() {
        bitField0_ |= 0x00002000;
        onChanged();
        return getLogAggregationContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
        if (logAggregationContextBuilder_ != null) {
          return logAggregationContextBuilder_.getMessageOrBuilder();
        } else {
          return logAggregationContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 14;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> 
          getLogAggregationContextFieldBuilder() {
        if (logAggregationContextBuilder_ == null) {
          logAggregationContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder>(
                  getLogAggregationContext(),
                  getParentForChildren(),
                  isClean());
          logAggregationContext_ = null;
        }
        return logAggregationContextBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00004000) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00004000) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00004000;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00004000);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00004000;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 15;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }

      private java.lang.Object nodeLabelExpression_ = "";
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @return Whether the nodeLabelExpression field is set.
       */
      public boolean hasNodeLabelExpression() {
        return ((bitField0_ & 0x00008000) != 0);
      }
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @return The nodeLabelExpression.
       */
      public java.lang.String getNodeLabelExpression() {
        java.lang.Object ref = nodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            nodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @return The bytes for nodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNodeLabelExpressionBytes() {
        java.lang.Object ref = nodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          nodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @param value The nodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabelExpression_ = value;
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @return This builder for chaining.
       */
      public Builder clearNodeLabelExpression() {
        nodeLabelExpression_ = getDefaultInstance().getNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x00008000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string node_label_expression = 16;</code>
       * @param value The bytes for nodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        nodeLabelExpression_ = value;
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> amContainerResourceRequest_ =
        java.util.Collections.emptyList();
      private void ensureAmContainerResourceRequestIsMutable() {
        if (!((bitField0_ & 0x00010000) != 0)) {
          amContainerResourceRequest_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto>(amContainerResourceRequest_);
          bitField0_ |= 0x00010000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> amContainerResourceRequestBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> getAmContainerResourceRequestList() {
        if (amContainerResourceRequestBuilder_ == null) {
          return java.util.Collections.unmodifiableList(amContainerResourceRequest_);
        } else {
          return amContainerResourceRequestBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public int getAmContainerResourceRequestCount() {
        if (amContainerResourceRequestBuilder_ == null) {
          return amContainerResourceRequest_.size();
        } else {
          return amContainerResourceRequestBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto getAmContainerResourceRequest(int index) {
        if (amContainerResourceRequestBuilder_ == null) {
          return amContainerResourceRequest_.get(index);
        } else {
          return amContainerResourceRequestBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder setAmContainerResourceRequest(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (amContainerResourceRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.set(index, value);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder setAmContainerResourceRequest(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (amContainerResourceRequestBuilder_ == null) {
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.set(index, builderForValue.build());
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder addAmContainerResourceRequest(org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (amContainerResourceRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.add(value);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder addAmContainerResourceRequest(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto value) {
        if (amContainerResourceRequestBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.add(index, value);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder addAmContainerResourceRequest(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (amContainerResourceRequestBuilder_ == null) {
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.add(builderForValue.build());
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder addAmContainerResourceRequest(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder builderForValue) {
        if (amContainerResourceRequestBuilder_ == null) {
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.add(index, builderForValue.build());
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder addAllAmContainerResourceRequest(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto> values) {
        if (amContainerResourceRequestBuilder_ == null) {
          ensureAmContainerResourceRequestIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, amContainerResourceRequest_);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder clearAmContainerResourceRequest() {
        if (amContainerResourceRequestBuilder_ == null) {
          amContainerResourceRequest_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00010000);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public Builder removeAmContainerResourceRequest(int index) {
        if (amContainerResourceRequestBuilder_ == null) {
          ensureAmContainerResourceRequestIsMutable();
          amContainerResourceRequest_.remove(index);
          onChanged();
        } else {
          amContainerResourceRequestBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder getAmContainerResourceRequestBuilder(
          int index) {
        return getAmContainerResourceRequestFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder getAmContainerResourceRequestOrBuilder(
          int index) {
        if (amContainerResourceRequestBuilder_ == null) {
          return amContainerResourceRequest_.get(index);
        } else {
          return amContainerResourceRequestBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
           getAmContainerResourceRequestOrBuilderList() {
        if (amContainerResourceRequestBuilder_ != null) {
          return amContainerResourceRequestBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(amContainerResourceRequest_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAmContainerResourceRequestBuilder() {
        return getAmContainerResourceRequestFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder addAmContainerResourceRequestBuilder(
          int index) {
        return getAmContainerResourceRequestFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceRequestProto am_container_resource_request = 17;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder> 
           getAmContainerResourceRequestBuilderList() {
        return getAmContainerResourceRequestFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder> 
          getAmContainerResourceRequestFieldBuilder() {
        if (amContainerResourceRequestBuilder_ == null) {
          amContainerResourceRequestBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceRequestProtoOrBuilder>(
                  amContainerResourceRequest_,
                  ((bitField0_ & 0x00010000) != 0),
                  getParentForChildren(),
                  isClean());
          amContainerResourceRequest_ = null;
        }
        return amContainerResourceRequestBuilder_;
      }
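
      // Editor's note (not generated code): repeated-message sketch. Until a
      // *Builder accessor is used, elements live in a plain ArrayList guarded
      // by ensureAmContainerResourceRequestIsMutable(); the first builder call
      // migrates them into RepeatedFieldBuilderV3, which then owns the storage
      // (amContainerResourceRequest_ is nulled above). Illustrative use, with
      // ResourceRequestProto's num_containers field assumed from the .proto:
      //
      //   b.addAmContainerResourceRequest(
      //       ResourceRequestProto.newBuilder().setNumContainers(1).build());
      //   b.addAmContainerResourceRequestBuilder()  // switches to builder-backed storage
      //    .setNumContainers(2);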

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> applicationTimeouts_ =
        java.util.Collections.emptyList();
      private void ensureApplicationTimeoutsIsMutable() {
        if (!((bitField0_ & 0x00020000) != 0)) {
          applicationTimeouts_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto>(applicationTimeouts_);
          bitField0_ |= 0x00020000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> applicationTimeoutsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> getApplicationTimeoutsList() {
        if (applicationTimeoutsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        } else {
          return applicationTimeoutsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public int getApplicationTimeoutsCount() {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.size();
        } else {
          return applicationTimeoutsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder setApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder addApplicationTimeouts(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto value) {
        if (applicationTimeoutsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, value);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder addApplicationTimeouts(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder addApplicationTimeouts(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder builderForValue) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder addAllApplicationTimeouts(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto> values) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationTimeouts_);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder clearApplicationTimeouts() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeouts_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00020000);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public Builder removeApplicationTimeouts(int index) {
        if (applicationTimeoutsBuilder_ == null) {
          ensureApplicationTimeoutsIsMutable();
          applicationTimeouts_.remove(index);
          onChanged();
        } else {
          applicationTimeoutsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder getApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder getApplicationTimeoutsOrBuilder(
          int index) {
        if (applicationTimeoutsBuilder_ == null) {
          return applicationTimeouts_.get(index);
        } else {
          return applicationTimeoutsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> 
           getApplicationTimeoutsOrBuilderList() {
        if (applicationTimeoutsBuilder_ != null) {
          return applicationTimeoutsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationTimeouts_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder addApplicationTimeoutsBuilder() {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder addApplicationTimeoutsBuilder(
          int index) {
        return getApplicationTimeoutsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationTimeoutMapProto application_timeouts = 18;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder> 
           getApplicationTimeoutsBuilderList() {
        return getApplicationTimeoutsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder> 
          getApplicationTimeoutsFieldBuilder() {
        if (applicationTimeoutsBuilder_ == null) {
          applicationTimeoutsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder>(
                  applicationTimeouts_,
                  ((bitField0_ & 0x00020000) != 0),
                  getParentForChildren(),
                  isClean());
          applicationTimeouts_ = null;
        }
        return applicationTimeoutsBuilder_;
      }
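
      // Illustrative sketch (not generated code): populating this repeated field from
      // application code, given a hypothetical builder instance "contextBuilder":
      //   contextBuilder.addApplicationTimeouts(
      //       ApplicationTimeoutMapProto.newBuilder()
      //           .setApplicationTimeoutType(ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
      //           .setTimeout(600L)
      //           .build());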

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> applicationSchedulingProperties_ =
        java.util.Collections.emptyList();
      private void ensureApplicationSchedulingPropertiesIsMutable() {
        if (!((bitField0_ & 0x00040000) != 0)) {
          applicationSchedulingProperties_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(applicationSchedulingProperties_);
          bitField0_ |= 0x00040000;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> applicationSchedulingPropertiesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getApplicationSchedulingPropertiesList() {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationSchedulingProperties_);
        } else {
          return applicationSchedulingPropertiesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public int getApplicationSchedulingPropertiesCount() {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          return applicationSchedulingProperties_.size();
        } else {
          return applicationSchedulingPropertiesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getApplicationSchedulingProperties(int index) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          return applicationSchedulingProperties_.get(index);
        } else {
          return applicationSchedulingPropertiesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder setApplicationSchedulingProperties(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.set(index, value);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder setApplicationSchedulingProperties(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder addApplicationSchedulingProperties(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.add(value);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder addApplicationSchedulingProperties(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.add(index, value);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder addApplicationSchedulingProperties(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.add(builderForValue.build());
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder addApplicationSchedulingProperties(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder addAllApplicationSchedulingProperties(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          ensureApplicationSchedulingPropertiesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationSchedulingProperties_);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder clearApplicationSchedulingProperties() {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          applicationSchedulingProperties_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00040000);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public Builder removeApplicationSchedulingProperties(int index) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          ensureApplicationSchedulingPropertiesIsMutable();
          applicationSchedulingProperties_.remove(index);
          onChanged();
        } else {
          applicationSchedulingPropertiesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getApplicationSchedulingPropertiesBuilder(
          int index) {
        return getApplicationSchedulingPropertiesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getApplicationSchedulingPropertiesOrBuilder(
          int index) {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          return applicationSchedulingProperties_.get(index);
        } else {
          return applicationSchedulingPropertiesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getApplicationSchedulingPropertiesOrBuilderList() {
        if (applicationSchedulingPropertiesBuilder_ != null) {
          return applicationSchedulingPropertiesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationSchedulingProperties_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addApplicationSchedulingPropertiesBuilder() {
        return getApplicationSchedulingPropertiesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addApplicationSchedulingPropertiesBuilder(
          int index) {
        return getApplicationSchedulingPropertiesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto application_scheduling_properties = 19;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getApplicationSchedulingPropertiesBuilderList() {
        return getApplicationSchedulingPropertiesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getApplicationSchedulingPropertiesFieldBuilder() {
        if (applicationSchedulingPropertiesBuilder_ == null) {
          applicationSchedulingPropertiesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  applicationSchedulingProperties_,
                  ((bitField0_ & 0x00040000) != 0),
                  getParentForChildren(),
                  isClean());
          applicationSchedulingProperties_ = null;
        }
        return applicationSchedulingPropertiesBuilder_;
      }
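
      // Illustrative sketch (not generated code): attaching a scheduling property,
      // assuming the usual setKey/setValue accessors on StringStringMapProto (its
      // builder is not shown in this excerpt); "contextBuilder" is hypothetical:
      //   contextBuilder.addApplicationSchedulingProperties(
      //       StringStringMapProto.newBuilder()
      //           .setKey("example-property")   // hypothetical property name
      //           .setValue("example-value")
      //           .build());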
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationSubmissionContextProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationSubmissionContextProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationSubmissionContextProto>() {
      @java.lang.Override
      public ApplicationSubmissionContextProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationSubmissionContextProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationSubmissionContextProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ApplicationTimeoutMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationTimeoutMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * <code>optional int64 timeout = 2;</code>
     * @return Whether the timeout field is set.
     */
    boolean hasTimeout();
    /**
     * <code>optional int64 timeout = 2;</code>
     * @return The timeout.
     */
    long getTimeout();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationTimeoutMapProto}
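   *
   * <p>Illustrative sketch, not part of the generated output; method names match the
   * builder API defined below:
   * <pre>{@code
   * ApplicationTimeoutMapProto timeout = ApplicationTimeoutMapProto.newBuilder()
   *     .setApplicationTimeoutType(ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
   *     .setTimeout(3600L) // timeout value; units are defined by the caller
   *     .build();
   * }</pre>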
   */
  public static final class ApplicationTimeoutMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationTimeoutMapProto)
      ApplicationTimeoutMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationTimeoutMapProto.newBuilder() to construct.
    private ApplicationTimeoutMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationTimeoutMapProto() {
      applicationTimeoutType_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationTimeoutMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
    }

    public static final int TIMEOUT_FIELD_NUMBER = 2;
    private long timeout_ = 0L;
    /**
     * <code>optional int64 timeout = 2;</code>
     * @return Whether the timeout field is set.
     */
    @java.lang.Override
    public boolean hasTimeout() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 timeout = 2;</code>
     * @return The timeout.
     */
    @java.lang.Override
    public long getTimeout() {
      return timeout_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, timeout_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, timeout_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto) obj;

      if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
      if (hasApplicationTimeoutType()) {
        if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
      }
      if (hasTimeout() != other.hasTimeout()) return false;
      if (hasTimeout()) {
        if (getTimeout()
            != other.getTimeout()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationTimeoutType()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + applicationTimeoutType_;
      }
      if (hasTimeout()) {
        hash = (37 * hash) + TIMEOUT_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getTimeout());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
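
    // Illustrative round-trip sketch (not generated code): any parsed instance can be
    // re-serialized with toByteArray(), inherited from the protobuf runtime, e.g.
    //   byte[] bytes = timeout.toByteArray();
    //   ApplicationTimeoutMapProto copy = ApplicationTimeoutMapProto.parseFrom(bytes);
    // where "timeout" is a hypothetical ApplicationTimeoutMapProto instance.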

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationTimeoutMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationTimeoutMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTimeoutType_ = 1;
        timeout_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.timeout_ = timeout_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasTimeout()) {
          setTimeout(other.getTimeout());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
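            // Wire tag = (field_number << 3) | wire_type: tag 8 is field 1 as a
            // varint, tag 16 is field 2 as a varint, and tag 0 marks end of input.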
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 16: {
                timeout_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int applicationTimeoutType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return Whether the applicationTimeoutType field is set.
       */
      @java.lang.Override public boolean hasApplicationTimeoutType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return The applicationTimeoutType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @param value The applicationTimeoutType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTimeoutType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        applicationTimeoutType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTimeoutType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationTimeoutType_ = 1;
        onChanged();
        return this;
      }

      private long timeout_;
      /**
       * <code>optional int64 timeout = 2;</code>
       * @return Whether the timeout field is set.
       */
      @java.lang.Override
      public boolean hasTimeout() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 timeout = 2;</code>
       * @return The timeout.
       */
      @java.lang.Override
      public long getTimeout() {
        return timeout_;
      }
      /**
       * <code>optional int64 timeout = 2;</code>
       * @param value The timeout to set.
       * @return This builder for chaining.
       */
      public Builder setTimeout(long value) {
        timeout_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 timeout = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTimeout() {
        bitField0_ = (bitField0_ & ~0x00000002);
        timeout_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationTimeoutMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationTimeoutMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationTimeoutMapProto>() {
      @java.lang.Override
      public ApplicationTimeoutMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationTimeoutMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ApplicationUpdateTimeoutMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    boolean hasApplicationTimeoutType();
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType();

    /**
     * <code>optional string expire_time = 2;</code>
     * @return Whether the expireTime field is set.
     */
    boolean hasExpireTime();
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The expireTime.
     */
    java.lang.String getExpireTime();
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The bytes for expireTime.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExpireTimeBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationUpdateTimeoutMapProto}
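   *
   * <p>Illustrative sketch, not part of the generated output; assumes the standard
   * generated setter {@code setExpireTime(String)} for the {@code expire_time} field:
   * <pre>{@code
   * ApplicationUpdateTimeoutMapProto update = ApplicationUpdateTimeoutMapProto.newBuilder()
   *     .setApplicationTimeoutType(ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
   *     .setExpireTime("2024-01-01T00:00:00.000+0000") // timestamp format is caller-defined
   *     .build();
   * }</pre>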
   */
  public static final class ApplicationUpdateTimeoutMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
      ApplicationUpdateTimeoutMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationUpdateTimeoutMapProto.newBuilder() to construct.
    private ApplicationUpdateTimeoutMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationUpdateTimeoutMapProto() {
      applicationTimeoutType_ = 1;
      expireTime_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationUpdateTimeoutMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER = 1;
    private int applicationTimeoutType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return Whether the applicationTimeoutType field is set.
     */
    @java.lang.Override public boolean hasApplicationTimeoutType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
     * @return The applicationTimeoutType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
    }

    public static final int EXPIRE_TIME_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object expireTime_ = "";
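    // expireTime_ holds either a String or a ByteString; getExpireTime() decodes
    // the bytes lazily and caches the String form when it is valid UTF-8.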
    /**
     * <code>optional string expire_time = 2;</code>
     * @return Whether the expireTime field is set.
     */
    @java.lang.Override
    public boolean hasExpireTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The expireTime.
     */
    @java.lang.Override
    public java.lang.String getExpireTime() {
      java.lang.Object ref = expireTime_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          expireTime_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string expire_time = 2;</code>
     * @return The bytes for expireTime.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getExpireTimeBytes() {
      java.lang.Object ref = expireTime_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        expireTime_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, expireTime_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, applicationTimeoutType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, expireTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto) obj;

      if (hasApplicationTimeoutType() != other.hasApplicationTimeoutType()) return false;
      if (hasApplicationTimeoutType()) {
        if (applicationTimeoutType_ != other.applicationTimeoutType_) return false;
      }
      if (hasExpireTime() != other.hasExpireTime()) return false;
      if (hasExpireTime()) {
        if (!getExpireTime()
            .equals(other.getExpireTime())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasApplicationTimeoutType()) {
        hash = (37 * hash) + APPLICATION_TIMEOUT_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + applicationTimeoutType_;
      }
      if (hasExpireTime()) {
        hash = (37 * hash) + EXPIRE_TIME_FIELD_NUMBER;
        hash = (53 * hash) + getExpireTime().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
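    // Usage sketch (illustrative, commented out): the byte[] overload above is
    // the usual round-trip entry point; toByteArray() is inherited from
    // AbstractMessageLite, and `someMessage` is a placeholder.
    //
    //   byte[] data = someMessage.toByteArray();
    //   ApplicationUpdateTimeoutMapProto copy =
    //       ApplicationUpdateTimeoutMapProto.parseFrom(data);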

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationUpdateTimeoutMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        applicationTimeoutType_ = 1;
        expireTime_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.applicationTimeoutType_ = applicationTimeoutType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.expireTime_ = expireTime_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto.getDefaultInstance()) return this;
        if (other.hasApplicationTimeoutType()) {
          setApplicationTimeoutType(other.getApplicationTimeoutType());
        }
        if (other.hasExpireTime()) {
          expireTime_ = other.expireTime_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  applicationTimeoutType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                expireTime_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
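      // Illustrative note on the case labels above: a protobuf wire tag is
      // (field_number << 3) | wire_type. Field 1 (the enum) is a varint
      // (wire type 0), so its tag is (1 << 3) | 0 = 8; field 2 (the string)
      // is length-delimited (wire type 2), so its tag is (2 << 3) | 2 = 18.
      // A tag of 0 never occurs in valid input and marks end of stream here.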
      private int bitField0_;

      private int applicationTimeoutType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return Whether the applicationTimeoutType field is set.
       */
      @java.lang.Override public boolean hasApplicationTimeoutType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return The applicationTimeoutType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto getApplicationTimeoutType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.forNumber(applicationTimeoutType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME : result;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @param value The applicationTimeoutType to set.
       * @return This builder for chaining.
       */
      public Builder setApplicationTimeoutType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationTimeoutTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        applicationTimeoutType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationTimeoutTypeProto application_timeout_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearApplicationTimeoutType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        applicationTimeoutType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object expireTime_ = "";
      /**
       * <code>optional string expire_time = 2;</code>
       * @return Whether the expireTime field is set.
       */
      public boolean hasExpireTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return The expireTime.
       */
      public java.lang.String getExpireTime() {
        java.lang.Object ref = expireTime_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            expireTime_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return The bytes for expireTime.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExpireTimeBytes() {
        java.lang.Object ref = expireTime_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          expireTime_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
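      // Illustrative note on the two getters above: expireTime_ holds either
      // a String or a ByteString and is converted lazily on first access in
      // the other representation. The converted value is cached back into the
      // field (for the String form, only when the bytes were valid UTF-8), so
      // repeated calls do not pay the conversion twice.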
      /**
       * <code>optional string expire_time = 2;</code>
       * @param value The expireTime to set.
       * @return This builder for chaining.
       */
      public Builder setExpireTime(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        expireTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearExpireTime() {
        expireTime_ = getDefaultInstance().getExpireTime();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string expire_time = 2;</code>
       * @param value The bytes for expireTime to set.
       * @return This builder for chaining.
       */
      public Builder setExpireTimeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        expireTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationUpdateTimeoutMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationUpdateTimeoutMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationUpdateTimeoutMapProto>() {
      @java.lang.Override
      public ApplicationUpdateTimeoutMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
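    // Illustrative note: the public PARSER field above is deprecated; the
    // parser() accessor below (or getParserForType() on an instance) is the
    // supported way to obtain the parser.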

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationUpdateTimeoutMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationUpdateTimeoutMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationUpdateTimeoutMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
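  // Usage sketch for ApplicationUpdateTimeoutMapProto (illustrative, commented
  // out; the timeout type shown is one of the generated enum constants and the
  // timestamp literal is a placeholder):
  //
  //   ApplicationUpdateTimeoutMapProto update =
  //       ApplicationUpdateTimeoutMapProto.newBuilder()
  //           .setApplicationTimeoutType(
  //               ApplicationTimeoutTypeProto.APP_TIMEOUT_LIFETIME)
  //           .setExpireTime("2024-01-01T00:00:00.000+0000")
  //           .build();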

  public interface LogAggregationContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LogAggregationContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return Whether the includePattern field is set.
     */
    boolean hasIncludePattern();
    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return The includePattern.
     */
    java.lang.String getIncludePattern();
    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return The bytes for includePattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getIncludePatternBytes();

    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return Whether the excludePattern field is set.
     */
    boolean hasExcludePattern();
    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return The excludePattern.
     */
    java.lang.String getExcludePattern();
    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return The bytes for excludePattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getExcludePatternBytes();

    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return Whether the rolledLogsIncludePattern field is set.
     */
    boolean hasRolledLogsIncludePattern();
    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return The rolledLogsIncludePattern.
     */
    java.lang.String getRolledLogsIncludePattern();
    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return The bytes for rolledLogsIncludePattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRolledLogsIncludePatternBytes();

    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return Whether the rolledLogsExcludePattern field is set.
     */
    boolean hasRolledLogsExcludePattern();
    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return The rolledLogsExcludePattern.
     */
    java.lang.String getRolledLogsExcludePattern();
    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return The bytes for rolledLogsExcludePattern.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRolledLogsExcludePatternBytes();

    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return Whether the logAggregationPolicyClassName field is set.
     */
    boolean hasLogAggregationPolicyClassName();
    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return The logAggregationPolicyClassName.
     */
    java.lang.String getLogAggregationPolicyClassName();
    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return The bytes for logAggregationPolicyClassName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogAggregationPolicyClassNameBytes();

    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return Whether the logAggregationPolicyParameters field is set.
     */
    boolean hasLogAggregationPolicyParameters();
    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return The logAggregationPolicyParameters.
     */
    java.lang.String getLogAggregationPolicyParameters();
    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return The bytes for logAggregationPolicyParameters.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogAggregationPolicyParametersBytes();
  }
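  // Illustrative note: LogAggregationContextProtoOrBuilder is the read-only
  // view shared by LogAggregationContextProto and its Builder; code that only
  // inspects fields can accept either through this interface.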
  /**
   * Protobuf type {@code hadoop.yarn.LogAggregationContextProto}
   */
  public static final class LogAggregationContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LogAggregationContextProto)
      LogAggregationContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LogAggregationContextProto.newBuilder() to construct.
    private LogAggregationContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LogAggregationContextProto() {
      includePattern_ = ".*";
      excludePattern_ = "";
      rolledLogsIncludePattern_ = "";
      rolledLogsExcludePattern_ = ".*";
      logAggregationPolicyClassName_ = "";
      logAggregationPolicyParameters_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LogAggregationContextProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder.class);
    }

    private int bitField0_;
    public static final int INCLUDE_PATTERN_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object includePattern_ = ".*";
    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return Whether the includePattern field is set.
     */
    @java.lang.Override
    public boolean hasIncludePattern() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return The includePattern.
     */
    @java.lang.Override
    public java.lang.String getIncludePattern() {
      java.lang.Object ref = includePattern_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          includePattern_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string include_pattern = 1 [default = ".*"];</code>
     * @return The bytes for includePattern.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getIncludePatternBytes() {
      java.lang.Object ref = includePattern_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        includePattern_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int EXCLUDE_PATTERN_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object excludePattern_ = "";
    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return Whether the excludePattern field is set.
     */
    @java.lang.Override
    public boolean hasExcludePattern() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return The excludePattern.
     */
    @java.lang.Override
    public java.lang.String getExcludePattern() {
      java.lang.Object ref = excludePattern_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          excludePattern_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string exclude_pattern = 2 [default = ""];</code>
     * @return The bytes for excludePattern.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getExcludePatternBytes() {
      java.lang.Object ref = excludePattern_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        excludePattern_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int ROLLED_LOGS_INCLUDE_PATTERN_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object rolledLogsIncludePattern_ = "";
    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return Whether the rolledLogsIncludePattern field is set.
     */
    @java.lang.Override
    public boolean hasRolledLogsIncludePattern() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return The rolledLogsIncludePattern.
     */
    @java.lang.Override
    public java.lang.String getRolledLogsIncludePattern() {
      java.lang.Object ref = rolledLogsIncludePattern_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          rolledLogsIncludePattern_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
     * @return The bytes for rolledLogsIncludePattern.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRolledLogsIncludePatternBytes() {
      java.lang.Object ref = rolledLogsIncludePattern_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        rolledLogsIncludePattern_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int ROLLED_LOGS_EXCLUDE_PATTERN_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object rolledLogsExcludePattern_ = ".*";
    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return Whether the rolledLogsExcludePattern field is set.
     */
    @java.lang.Override
    public boolean hasRolledLogsExcludePattern() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return The rolledLogsExcludePattern.
     */
    @java.lang.Override
    public java.lang.String getRolledLogsExcludePattern() {
      java.lang.Object ref = rolledLogsExcludePattern_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          rolledLogsExcludePattern_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
     * @return The bytes for rolledLogsExcludePattern.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRolledLogsExcludePatternBytes() {
      java.lang.Object ref = rolledLogsExcludePattern_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        rolledLogsExcludePattern_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LOG_AGGREGATION_POLICY_CLASS_NAME_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object logAggregationPolicyClassName_ = "";
    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return Whether the logAggregationPolicyClassName field is set.
     */
    @java.lang.Override
    public boolean hasLogAggregationPolicyClassName() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return The logAggregationPolicyClassName.
     */
    @java.lang.Override
    public java.lang.String getLogAggregationPolicyClassName() {
      java.lang.Object ref = logAggregationPolicyClassName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          logAggregationPolicyClassName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string log_aggregation_policy_class_name = 5;</code>
     * @return The bytes for logAggregationPolicyClassName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogAggregationPolicyClassNameBytes() {
      java.lang.Object ref = logAggregationPolicyClassName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        logAggregationPolicyClassName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int LOG_AGGREGATION_POLICY_PARAMETERS_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private volatile java.lang.Object logAggregationPolicyParameters_ = "";
    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return Whether the logAggregationPolicyParameters field is set.
     */
    @java.lang.Override
    public boolean hasLogAggregationPolicyParameters() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return The logAggregationPolicyParameters.
     */
    @java.lang.Override
    public java.lang.String getLogAggregationPolicyParameters() {
      java.lang.Object ref = logAggregationPolicyParameters_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          logAggregationPolicyParameters_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string log_aggregation_policy_parameters = 6;</code>
     * @return The bytes for logAggregationPolicyParameters.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLogAggregationPolicyParametersBytes() {
      java.lang.Object ref = logAggregationPolicyParameters_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        logAggregationPolicyParameters_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, includePattern_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, excludePattern_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, rolledLogsIncludePattern_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, rolledLogsExcludePattern_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, logAggregationPolicyClassName_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 6, logAggregationPolicyParameters_);
      }
      getUnknownFields().writeTo(output);
    }
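    // Illustrative note: writeTo emits fields in ascending field-number order
    // and only when the matching presence bit in bitField0_ is set; an unset
    // optional field contributes no bytes, even when its declared default
    // (such as include_pattern's ".*") is non-empty.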

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, includePattern_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, excludePattern_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, rolledLogsIncludePattern_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, rolledLogsExcludePattern_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, logAggregationPolicyClassName_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(6, logAggregationPolicyParameters_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto other = (org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto) obj;

      if (hasIncludePattern() != other.hasIncludePattern()) return false;
      if (hasIncludePattern()) {
        if (!getIncludePattern()
            .equals(other.getIncludePattern())) return false;
      }
      if (hasExcludePattern() != other.hasExcludePattern()) return false;
      if (hasExcludePattern()) {
        if (!getExcludePattern()
            .equals(other.getExcludePattern())) return false;
      }
      if (hasRolledLogsIncludePattern() != other.hasRolledLogsIncludePattern()) return false;
      if (hasRolledLogsIncludePattern()) {
        if (!getRolledLogsIncludePattern()
            .equals(other.getRolledLogsIncludePattern())) return false;
      }
      if (hasRolledLogsExcludePattern() != other.hasRolledLogsExcludePattern()) return false;
      if (hasRolledLogsExcludePattern()) {
        if (!getRolledLogsExcludePattern()
            .equals(other.getRolledLogsExcludePattern())) return false;
      }
      if (hasLogAggregationPolicyClassName() != other.hasLogAggregationPolicyClassName()) return false;
      if (hasLogAggregationPolicyClassName()) {
        if (!getLogAggregationPolicyClassName()
            .equals(other.getLogAggregationPolicyClassName())) return false;
      }
      if (hasLogAggregationPolicyParameters() != other.hasLogAggregationPolicyParameters()) return false;
      if (hasLogAggregationPolicyParameters()) {
        if (!getLogAggregationPolicyParameters()
            .equals(other.getLogAggregationPolicyParameters())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasIncludePattern()) {
        hash = (37 * hash) + INCLUDE_PATTERN_FIELD_NUMBER;
        hash = (53 * hash) + getIncludePattern().hashCode();
      }
      if (hasExcludePattern()) {
        hash = (37 * hash) + EXCLUDE_PATTERN_FIELD_NUMBER;
        hash = (53 * hash) + getExcludePattern().hashCode();
      }
      if (hasRolledLogsIncludePattern()) {
        hash = (37 * hash) + ROLLED_LOGS_INCLUDE_PATTERN_FIELD_NUMBER;
        hash = (53 * hash) + getRolledLogsIncludePattern().hashCode();
      }
      if (hasRolledLogsExcludePattern()) {
        hash = (37 * hash) + ROLLED_LOGS_EXCLUDE_PATTERN_FIELD_NUMBER;
        hash = (53 * hash) + getRolledLogsExcludePattern().hashCode();
      }
      if (hasLogAggregationPolicyClassName()) {
        hash = (37 * hash) + LOG_AGGREGATION_POLICY_CLASS_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getLogAggregationPolicyClassName().hashCode();
      }
      if (hasLogAggregationPolicyParameters()) {
        hash = (37 * hash) + LOG_AGGREGATION_POLICY_PARAMETERS_FIELD_NUMBER;
        hash = (53 * hash) + getLogAggregationPolicyParameters().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
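    // Usage sketch (illustrative, commented out): parseDelimitedFrom pairs
    // with writeDelimitedTo to read a sequence of length-prefixed messages
    // from one stream, returning null at end of stream; `in` is a placeholder
    // java.io.InputStream.
    //
    //   LogAggregationContextProto ctx;
    //   while ((ctx = LogAggregationContextProto.parseDelimitedFrom(in)) != null) {
    //     // handle ctx
    //   }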

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LogAggregationContextProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LogAggregationContextProto)
        org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        includePattern_ = ".*";
        excludePattern_ = "";
        rolledLogsIncludePattern_ = "";
        rolledLogsExcludePattern_ = ".*";
        logAggregationPolicyClassName_ = "";
        logAggregationPolicyParameters_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_LogAggregationContextProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.includePattern_ = includePattern_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.excludePattern_ = excludePattern_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.rolledLogsIncludePattern_ = rolledLogsIncludePattern_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.rolledLogsExcludePattern_ = rolledLogsExcludePattern_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.logAggregationPolicyClassName_ = logAggregationPolicyClassName_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.logAggregationPolicyParameters_ = logAggregationPolicyParameters_;
          to_bitField0_ |= 0x00000020;
        }
        result.bitField0_ |= to_bitField0_;
      }
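      // Illustrative note: buildPartial0 copies each set field and its
      // presence bit from the builder into the new message, then commits all
      // presence bits with the single result.bitField0_ |= to_bitField0_.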

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) return this;
        if (other.hasIncludePattern()) {
          includePattern_ = other.includePattern_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasExcludePattern()) {
          excludePattern_ = other.excludePattern_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasRolledLogsIncludePattern()) {
          rolledLogsIncludePattern_ = other.rolledLogsIncludePattern_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasRolledLogsExcludePattern()) {
          rolledLogsExcludePattern_ = other.rolledLogsExcludePattern_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasLogAggregationPolicyClassName()) {
          logAggregationPolicyClassName_ = other.logAggregationPolicyClassName_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (other.hasLogAggregationPolicyParameters()) {
          logAggregationPolicyParameters_ = other.logAggregationPolicyParameters_;
          bitField0_ |= 0x00000020;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                includePattern_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                excludePattern_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                rolledLogsIncludePattern_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                rolledLogsExcludePattern_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                logAggregationPolicyClassName_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                logAggregationPolicyParameters_ = input.readBytes();
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
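      // Illustrative note: all six fields above are strings (wire type 2), so
      // each tag is (field_number << 3) | 2 -- hence 10, 18, 26, 34, 42 and 50
      // for fields 1 through 6.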
      private int bitField0_;

      private java.lang.Object includePattern_ = ".*";
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @return Whether the includePattern field is set.
       */
      public boolean hasIncludePattern() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @return The includePattern.
       */
      public java.lang.String getIncludePattern() {
        java.lang.Object ref = includePattern_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            includePattern_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @return The bytes for includePattern.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getIncludePatternBytes() {
        java.lang.Object ref = includePattern_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          includePattern_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @param value The includePattern to set.
       * @return This builder for chaining.
       */
      public Builder setIncludePattern(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        includePattern_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @return This builder for chaining.
       */
      public Builder clearIncludePattern() {
        includePattern_ = getDefaultInstance().getIncludePattern();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string include_pattern = 1 [default = ".*"];</code>
       * @param value The bytes for includePattern to set.
       * @return This builder for chaining.
       */
      public Builder setIncludePatternBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        includePattern_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object excludePattern_ = "";
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @return Whether the excludePattern field is set.
       */
      public boolean hasExcludePattern() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @return The excludePattern.
       */
      public java.lang.String getExcludePattern() {
        java.lang.Object ref = excludePattern_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            excludePattern_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @return The bytes for excludePattern.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getExcludePatternBytes() {
        java.lang.Object ref = excludePattern_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          excludePattern_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @param value The excludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setExcludePattern(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        excludePattern_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @return This builder for chaining.
       */
      public Builder clearExcludePattern() {
        excludePattern_ = getDefaultInstance().getExcludePattern();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string exclude_pattern = 2 [default = ""];</code>
       * @param value The bytes for excludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setExcludePatternBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        excludePattern_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object rolledLogsIncludePattern_ = "";
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @return Whether the rolledLogsIncludePattern field is set.
       */
      public boolean hasRolledLogsIncludePattern() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @return The rolledLogsIncludePattern.
       */
      public java.lang.String getRolledLogsIncludePattern() {
        java.lang.Object ref = rolledLogsIncludePattern_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            rolledLogsIncludePattern_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @return The bytes for rolledLogsIncludePattern.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRolledLogsIncludePatternBytes() {
        java.lang.Object ref = rolledLogsIncludePattern_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rolledLogsIncludePattern_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @param value The rolledLogsIncludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setRolledLogsIncludePattern(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        rolledLogsIncludePattern_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @return This builder for chaining.
       */
      public Builder clearRolledLogsIncludePattern() {
        rolledLogsIncludePattern_ = getDefaultInstance().getRolledLogsIncludePattern();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string rolled_logs_include_pattern = 3 [default = ""];</code>
       * @param value The bytes for rolledLogsIncludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setRolledLogsIncludePatternBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        rolledLogsIncludePattern_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.lang.Object rolledLogsExcludePattern_ = ".*";
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @return Whether the rolledLogsExcludePattern field is set.
       */
      public boolean hasRolledLogsExcludePattern() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @return The rolledLogsExcludePattern.
       */
      public java.lang.String getRolledLogsExcludePattern() {
        java.lang.Object ref = rolledLogsExcludePattern_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            rolledLogsExcludePattern_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @return The bytes for rolledLogsExcludePattern.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRolledLogsExcludePatternBytes() {
        java.lang.Object ref = rolledLogsExcludePattern_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          rolledLogsExcludePattern_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @param value The rolledLogsExcludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setRolledLogsExcludePattern(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        rolledLogsExcludePattern_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @return This builder for chaining.
       */
      public Builder clearRolledLogsExcludePattern() {
        rolledLogsExcludePattern_ = getDefaultInstance().getRolledLogsExcludePattern();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string rolled_logs_exclude_pattern = 4 [default = ".*"];</code>
       * @param value The bytes for rolledLogsExcludePattern to set.
       * @return This builder for chaining.
       */
      public Builder setRolledLogsExcludePatternBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        rolledLogsExcludePattern_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private java.lang.Object logAggregationPolicyClassName_ = "";
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @return Whether the logAggregationPolicyClassName field is set.
       */
      public boolean hasLogAggregationPolicyClassName() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @return The logAggregationPolicyClassName.
       */
      public java.lang.String getLogAggregationPolicyClassName() {
        java.lang.Object ref = logAggregationPolicyClassName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            logAggregationPolicyClassName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @return The bytes for logAggregationPolicyClassName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLogAggregationPolicyClassNameBytes() {
        java.lang.Object ref = logAggregationPolicyClassName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          logAggregationPolicyClassName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @param value The logAggregationPolicyClassName to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationPolicyClassName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        logAggregationPolicyClassName_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearLogAggregationPolicyClassName() {
        logAggregationPolicyClassName_ = getDefaultInstance().getLogAggregationPolicyClassName();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_aggregation_policy_class_name = 5;</code>
       * @param value The bytes for logAggregationPolicyClassName to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationPolicyClassNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        logAggregationPolicyClassName_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private java.lang.Object logAggregationPolicyParameters_ = "";
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @return Whether the logAggregationPolicyParameters field is set.
       */
      public boolean hasLogAggregationPolicyParameters() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @return The logAggregationPolicyParameters.
       */
      public java.lang.String getLogAggregationPolicyParameters() {
        java.lang.Object ref = logAggregationPolicyParameters_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            logAggregationPolicyParameters_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @return The bytes for logAggregationPolicyParameters.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLogAggregationPolicyParametersBytes() {
        java.lang.Object ref = logAggregationPolicyParameters_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          logAggregationPolicyParameters_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @param value The logAggregationPolicyParameters to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationPolicyParameters(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        logAggregationPolicyParameters_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearLogAggregationPolicyParameters() {
        logAggregationPolicyParameters_ = getDefaultInstance().getLogAggregationPolicyParameters();
        bitField0_ = (bitField0_ & ~0x00000020);
        onChanged();
        return this;
      }
      /**
       * <code>optional string log_aggregation_policy_parameters = 6;</code>
       * @param value The bytes for logAggregationPolicyParameters to set.
       * @return This builder for chaining.
       */
      public Builder setLogAggregationPolicyParametersBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        logAggregationPolicyParameters_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LogAggregationContextProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LogAggregationContextProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationContextProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LogAggregationContextProto>() {
      @java.lang.Override
      public LogAggregationContextProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationContextProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LogAggregationContextProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
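  // Illustrative builder usage (a sketch; the pattern strings below are
  // example values, not anything mandated by this file):
  //
  //   LogAggregationContextProto ctx =
  //       LogAggregationContextProto.newBuilder()
  //           .setIncludePattern(".*\\.log")      // overrides the ".*" default
  //           .setRolledLogsIncludePattern("stdout|stderr")
  //           .build();
  //
  // Fields left unset report hasXxx() == false and fall back to their
  // declared defaults: ".*" for include_pattern and
  // rolled_logs_exclude_pattern, "" for the remaining string fields.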

  public interface ApplicationACLMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ApplicationACLMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
     * @return Whether the accessType field is set.
     */
    boolean hasAccessType();
    /**
     * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
     * @return The accessType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType();

    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return Whether the acl field is set.
     */
    boolean hasAcl();
    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return The acl.
     */
    java.lang.String getAcl();
    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return The bytes for acl.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAclBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ApplicationACLMapProto}
   */
  public static final class ApplicationACLMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ApplicationACLMapProto)
      ApplicationACLMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ApplicationACLMapProto.newBuilder() to construct.
    private ApplicationACLMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ApplicationACLMapProto() {
      accessType_ = 1;
      acl_ = " ";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ApplicationACLMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int ACCESSTYPE_FIELD_NUMBER = 1;
    private int accessType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
     * @return Whether the accessType field is set.
     */
    @java.lang.Override public boolean hasAccessType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
     * @return The accessType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(accessType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.APPACCESS_VIEW_APP : result;
    }

    public static final int ACL_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object acl_ = " ";
    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return Whether the acl field is set.
     */
    @java.lang.Override
    public boolean hasAcl() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return The acl.
     */
    @java.lang.Override
    public java.lang.String getAcl() {
      java.lang.Object ref = acl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          acl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string acl = 2 [default = " "];</code>
     * @return The bytes for acl.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAclBytes() {
      java.lang.Object ref = acl_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        acl_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
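    // memoizedIsInitialized uses -1 for "not yet computed", 0 for false and
    // 1 for true. This message declares no required fields, so the check
    // always memoizes 1 and returns true after the first call.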

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, accessType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, acl_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, accessType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, acl_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto) obj;

      if (hasAccessType() != other.hasAccessType()) return false;
      if (hasAccessType()) {
        if (accessType_ != other.accessType_) return false;
      }
      if (hasAcl() != other.hasAcl()) return false;
      if (hasAcl()) {
        if (!getAcl()
            .equals(other.getAcl())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasAccessType()) {
        hash = (37 * hash) + ACCESSTYPE_FIELD_NUMBER;
        hash = (53 * hash) + accessType_;
      }
      if (hasAcl()) {
        hash = (37 * hash) + ACL_FIELD_NUMBER;
        hash = (53 * hash) + getAcl().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ApplicationACLMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ApplicationACLMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        accessType_ = 1;
        acl_ = " ";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.accessType_ = accessType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.acl_ = acl_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance()) return this;
        if (other.hasAccessType()) {
          setAccessType(other.getAccessType());
        }
        if (other.hasAcl()) {
          acl_ = other.acl_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  accessType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                acl_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
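      // Unknown-enum handling in "case 8" above: this is a proto2 message,
      // so an accessType varint that does not map to a known
      // ApplicationAccessTypeProto constant is not dropped. It is routed to
      // mergeUnknownVarintField(1, tmpRaw) and kept in the unknown field
      // set, which means reserializing the message round-trips the raw
      // value unchanged.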
      private int bitField0_;

      private int accessType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
       * @return Whether the accessType field is set.
       */
      @java.lang.Override public boolean hasAccessType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
       * @return The accessType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto getAccessType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.forNumber(accessType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto.APPACCESS_VIEW_APP : result;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
       * @param value The accessType to set.
       * @return This builder for chaining.
       */
      public Builder setAccessType(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAccessTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        accessType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationAccessTypeProto accessType = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearAccessType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        accessType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object acl_ = " ";
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @return Whether the acl field is set.
       */
      public boolean hasAcl() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @return The acl.
       */
      public java.lang.String getAcl() {
        java.lang.Object ref = acl_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            acl_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @return The bytes for acl.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAclBytes() {
        java.lang.Object ref = acl_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          acl_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @param value The acl to set.
       * @return This builder for chaining.
       */
      public Builder setAcl(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        acl_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @return This builder for chaining.
       */
      public Builder clearAcl() {
        acl_ = getDefaultInstance().getAcl();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string acl = 2 [default = " "];</code>
       * @param value The bytes for acl to set.
       * @return This builder for chaining.
       */
      public Builder setAclBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        acl_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ApplicationACLMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ApplicationACLMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationACLMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ApplicationACLMapProto>() {
      @java.lang.Override
      public ApplicationACLMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationACLMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ApplicationACLMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
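  // Illustrative round trip (a sketch; the ACL string is an example value):
  //
  //   ApplicationACLMapProto entry = ApplicationACLMapProto.newBuilder()
  //       .setAccessType(ApplicationAccessTypeProto.APPACCESS_VIEW_APP)
  //       .setAcl("admins,ops")
  //       .build();
  //   ApplicationACLMapProto parsed =
  //       ApplicationACLMapProto.parseFrom(entry.toByteArray());
  //
  // Here parsed.equals(entry) holds: equals() compares both fields plus
  // any unknown fields, as defined above.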

  public interface YarnClusterMetricsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.YarnClusterMetricsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 num_node_managers = 1;</code>
     * @return Whether the numNodeManagers field is set.
     */
    boolean hasNumNodeManagers();
    /**
     * <code>optional int32 num_node_managers = 1;</code>
     * @return The numNodeManagers.
     */
    int getNumNodeManagers();

    /**
     * <code>optional int32 num_decommissioned_nms = 2;</code>
     * @return Whether the numDecommissionedNms field is set.
     */
    boolean hasNumDecommissionedNms();
    /**
     * <code>optional int32 num_decommissioned_nms = 2;</code>
     * @return The numDecommissionedNms.
     */
    int getNumDecommissionedNms();

    /**
     * <code>optional int32 num_active_nms = 3;</code>
     * @return Whether the numActiveNms field is set.
     */
    boolean hasNumActiveNms();
    /**
     * <code>optional int32 num_active_nms = 3;</code>
     * @return The numActiveNms.
     */
    int getNumActiveNms();

    /**
     * <code>optional int32 num_lost_nms = 4;</code>
     * @return Whether the numLostNms field is set.
     */
    boolean hasNumLostNms();
    /**
     * <code>optional int32 num_lost_nms = 4;</code>
     * @return The numLostNms.
     */
    int getNumLostNms();

    /**
     * <code>optional int32 num_unhealthy_nms = 5;</code>
     * @return Whether the numUnhealthyNms field is set.
     */
    boolean hasNumUnhealthyNms();
    /**
     * <code>optional int32 num_unhealthy_nms = 5;</code>
     * @return The numUnhealthyNms.
     */
    int getNumUnhealthyNms();

    /**
     * <code>optional int32 num_rebooted_nms = 6;</code>
     * @return Whether the numRebootedNms field is set.
     */
    boolean hasNumRebootedNms();
    /**
     * <code>optional int32 num_rebooted_nms = 6;</code>
     * @return The numRebootedNms.
     */
    int getNumRebootedNms();

    /**
     * <code>optional int32 num_decommissioning_nms = 7;</code>
     * @return Whether the numDecommissioningNms field is set.
     */
    boolean hasNumDecommissioningNms();
    /**
     * <code>optional int32 num_decommissioning_nms = 7;</code>
     * @return The numDecommissioningNms.
     */
    int getNumDecommissioningNms();

    /**
     * <code>optional int32 num_shutdown_nms = 8;</code>
     * @return Whether the numShutdownNms field is set.
     */
    boolean hasNumShutdownNms();
    /**
     * <code>optional int32 num_shutdown_nms = 8;</code>
     * @return The numShutdownNms.
     */
    int getNumShutdownNms();
  }
  /**
   * Protobuf type {@code hadoop.yarn.YarnClusterMetricsProto}
   */
  public static final class YarnClusterMetricsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.YarnClusterMetricsProto)
      YarnClusterMetricsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use YarnClusterMetricsProto.newBuilder() to construct.
    private YarnClusterMetricsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private YarnClusterMetricsProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new YarnClusterMetricsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder.class);
    }

    private int bitField0_;
    public static final int NUM_NODE_MANAGERS_FIELD_NUMBER = 1;
    private int numNodeManagers_ = 0;
    /**
     * <code>optional int32 num_node_managers = 1;</code>
     * @return Whether the numNodeManagers field is set.
     */
    @java.lang.Override
    public boolean hasNumNodeManagers() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 num_node_managers = 1;</code>
     * @return The numNodeManagers.
     */
    @java.lang.Override
    public int getNumNodeManagers() {
      return numNodeManagers_;
    }

    public static final int NUM_DECOMMISSIONED_NMS_FIELD_NUMBER = 2;
    private int numDecommissionedNms_ = 0;
    /**
     * <code>optional int32 num_decommissioned_nms = 2;</code>
     * @return Whether the numDecommissionedNms field is set.
     */
    @java.lang.Override
    public boolean hasNumDecommissionedNms() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 num_decommissioned_nms = 2;</code>
     * @return The numDecommissionedNms.
     */
    @java.lang.Override
    public int getNumDecommissionedNms() {
      return numDecommissionedNms_;
    }

    public static final int NUM_ACTIVE_NMS_FIELD_NUMBER = 3;
    private int numActiveNms_ = 0;
    /**
     * <code>optional int32 num_active_nms = 3;</code>
     * @return Whether the numActiveNms field is set.
     */
    @java.lang.Override
    public boolean hasNumActiveNms() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 num_active_nms = 3;</code>
     * @return The numActiveNms.
     */
    @java.lang.Override
    public int getNumActiveNms() {
      return numActiveNms_;
    }

    public static final int NUM_LOST_NMS_FIELD_NUMBER = 4;
    private int numLostNms_ = 0;
    /**
     * <code>optional int32 num_lost_nms = 4;</code>
     * @return Whether the numLostNms field is set.
     */
    @java.lang.Override
    public boolean hasNumLostNms() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int32 num_lost_nms = 4;</code>
     * @return The numLostNms.
     */
    @java.lang.Override
    public int getNumLostNms() {
      return numLostNms_;
    }

    public static final int NUM_UNHEALTHY_NMS_FIELD_NUMBER = 5;
    private int numUnhealthyNms_ = 0;
    /**
     * <code>optional int32 num_unhealthy_nms = 5;</code>
     * @return Whether the numUnhealthyNms field is set.
     */
    @java.lang.Override
    public boolean hasNumUnhealthyNms() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional int32 num_unhealthy_nms = 5;</code>
     * @return The numUnhealthyNms.
     */
    @java.lang.Override
    public int getNumUnhealthyNms() {
      return numUnhealthyNms_;
    }

    public static final int NUM_REBOOTED_NMS_FIELD_NUMBER = 6;
    private int numRebootedNms_ = 0;
    /**
     * <code>optional int32 num_rebooted_nms = 6;</code>
     * @return Whether the numRebootedNms field is set.
     */
    @java.lang.Override
    public boolean hasNumRebootedNms() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int32 num_rebooted_nms = 6;</code>
     * @return The numRebootedNms.
     */
    @java.lang.Override
    public int getNumRebootedNms() {
      return numRebootedNms_;
    }

    public static final int NUM_DECOMMISSIONING_NMS_FIELD_NUMBER = 7;
    private int numDecommissioningNms_ = 0;
    /**
     * <code>optional int32 num_decommissioning_nms = 7;</code>
     * @return Whether the numDecommissioningNms field is set.
     */
    @java.lang.Override
    public boolean hasNumDecommissioningNms() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional int32 num_decommissioning_nms = 7;</code>
     * @return The numDecommissioningNms.
     */
    @java.lang.Override
    public int getNumDecommissioningNms() {
      return numDecommissioningNms_;
    }

    public static final int NUM_SHUTDOWN_NMS_FIELD_NUMBER = 8;
    private int numShutdownNms_ = 0;
    /**
     * <code>optional int32 num_shutdown_nms = 8;</code>
     * @return Whether the numShutdownNms field is set.
     */
    @java.lang.Override
    public boolean hasNumShutdownNms() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional int32 num_shutdown_nms = 8;</code>
     * @return The numShutdownNms.
     */
    @java.lang.Override
    public int getNumShutdownNms() {
      return numShutdownNms_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, numNodeManagers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, numDecommissionedNms_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, numActiveNms_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(4, numLostNms_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt32(5, numUnhealthyNms_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt32(6, numRebootedNms_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt32(7, numDecommissioningNms_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt32(8, numShutdownNms_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, numNodeManagers_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, numDecommissionedNms_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, numActiveNms_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, numLostNms_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(5, numUnhealthyNms_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(6, numRebootedNms_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(7, numDecommissioningNms_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(8, numShutdownNms_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
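
    // Size sketch (illustrative): computeInt32Size(field, value) returns the tag
    // bytes plus the varint length, so values 0..127 in fields 1..15 cost two
    // bytes each (e.g. computeInt32Size(1, 10) == 2), while negative int32
    // values always encode as 10-byte varints. The result is memoized in
    // memoizedSize because the message is immutable once built.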

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto) obj;

      if (hasNumNodeManagers() != other.hasNumNodeManagers()) return false;
      if (hasNumNodeManagers()) {
        if (getNumNodeManagers()
            != other.getNumNodeManagers()) return false;
      }
      if (hasNumDecommissionedNms() != other.hasNumDecommissionedNms()) return false;
      if (hasNumDecommissionedNms()) {
        if (getNumDecommissionedNms()
            != other.getNumDecommissionedNms()) return false;
      }
      if (hasNumActiveNms() != other.hasNumActiveNms()) return false;
      if (hasNumActiveNms()) {
        if (getNumActiveNms()
            != other.getNumActiveNms()) return false;
      }
      if (hasNumLostNms() != other.hasNumLostNms()) return false;
      if (hasNumLostNms()) {
        if (getNumLostNms()
            != other.getNumLostNms()) return false;
      }
      if (hasNumUnhealthyNms() != other.hasNumUnhealthyNms()) return false;
      if (hasNumUnhealthyNms()) {
        if (getNumUnhealthyNms()
            != other.getNumUnhealthyNms()) return false;
      }
      if (hasNumRebootedNms() != other.hasNumRebootedNms()) return false;
      if (hasNumRebootedNms()) {
        if (getNumRebootedNms()
            != other.getNumRebootedNms()) return false;
      }
      if (hasNumDecommissioningNms() != other.hasNumDecommissioningNms()) return false;
      if (hasNumDecommissioningNms()) {
        if (getNumDecommissioningNms()
            != other.getNumDecommissioningNms()) return false;
      }
      if (hasNumShutdownNms() != other.hasNumShutdownNms()) return false;
      if (hasNumShutdownNms()) {
        if (getNumShutdownNms()
            != other.getNumShutdownNms()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNumNodeManagers()) {
        hash = (37 * hash) + NUM_NODE_MANAGERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumNodeManagers();
      }
      if (hasNumDecommissionedNms()) {
        hash = (37 * hash) + NUM_DECOMMISSIONED_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumDecommissionedNms();
      }
      if (hasNumActiveNms()) {
        hash = (37 * hash) + NUM_ACTIVE_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumActiveNms();
      }
      if (hasNumLostNms()) {
        hash = (37 * hash) + NUM_LOST_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumLostNms();
      }
      if (hasNumUnhealthyNms()) {
        hash = (37 * hash) + NUM_UNHEALTHY_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumUnhealthyNms();
      }
      if (hasNumRebootedNms()) {
        hash = (37 * hash) + NUM_REBOOTED_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumRebootedNms();
      }
      if (hasNumDecommissioningNms()) {
        hash = (37 * hash) + NUM_DECOMMISSIONING_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumDecommissioningNms();
      }
      if (hasNumShutdownNms()) {
        hash = (37 * hash) + NUM_SHUTDOWN_NMS_FIELD_NUMBER;
        hash = (53 * hash) + getNumShutdownNms();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
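
    // Parsing sketch (illustrative; `metrics` is hypothetical): all of the
    // overloads above funnel into the same PARSER.
    //
    //   byte[] data = metrics.toByteArray();
    //   YarnClusterMetricsProto parsed = YarnClusterMetricsProto.parseFrom(data);
    //
    // The InputStream overloads parse the stream to EOF as one message, while
    // parseDelimitedFrom first reads a varint length prefix, so several messages
    // can be written back-to-back on a single stream.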

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.YarnClusterMetricsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.YarnClusterMetricsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        numNodeManagers_ = 0;
        numDecommissionedNms_ = 0;
        numActiveNms_ = 0;
        numLostNms_ = 0;
        numUnhealthyNms_ = 0;
        numRebootedNms_ = 0;
        numDecommissioningNms_ = 0;
        numShutdownNms_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.numNodeManagers_ = numNodeManagers_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.numDecommissionedNms_ = numDecommissionedNms_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.numActiveNms_ = numActiveNms_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.numLostNms_ = numLostNms_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.numUnhealthyNms_ = numUnhealthyNms_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.numRebootedNms_ = numRebootedNms_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.numDecommissioningNms_ = numDecommissioningNms_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.numShutdownNms_ = numShutdownNms_;
          to_bitField0_ |= 0x00000080;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto.getDefaultInstance()) return this;
        if (other.hasNumNodeManagers()) {
          setNumNodeManagers(other.getNumNodeManagers());
        }
        if (other.hasNumDecommissionedNms()) {
          setNumDecommissionedNms(other.getNumDecommissionedNms());
        }
        if (other.hasNumActiveNms()) {
          setNumActiveNms(other.getNumActiveNms());
        }
        if (other.hasNumLostNms()) {
          setNumLostNms(other.getNumLostNms());
        }
        if (other.hasNumUnhealthyNms()) {
          setNumUnhealthyNms(other.getNumUnhealthyNms());
        }
        if (other.hasNumRebootedNms()) {
          setNumRebootedNms(other.getNumRebootedNms());
        }
        if (other.hasNumDecommissioningNms()) {
          setNumDecommissioningNms(other.getNumDecommissioningNms());
        }
        if (other.hasNumShutdownNms()) {
          setNumShutdownNms(other.getNumShutdownNms());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                numNodeManagers_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                numDecommissionedNms_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                numActiveNms_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                numLostNms_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 40: {
                numUnhealthyNms_ = input.readInt32();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              case 48: {
                numRebootedNms_ = input.readInt32();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                numDecommissioningNms_ = input.readInt32();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 64: {
                numShutdownNms_ = input.readInt32();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
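
      // Tag-decoding sketch (illustrative): the case labels above are the
      // precomputed tags, tag = (field_number << 3) | wire_type; with
      // wire_type 0 (varint) that yields case 8 for field 1, 16 for field 2,
      // ... 64 for field 8. Unrecognized tags go to parseUnknownField, which
      // preserves them so unknown data round-trips through this builder.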
      private int bitField0_;

      private int numNodeManagers_;
      /**
       * <code>optional int32 num_node_managers = 1;</code>
       * @return Whether the numNodeManagers field is set.
       */
      @java.lang.Override
      public boolean hasNumNodeManagers() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 num_node_managers = 1;</code>
       * @return The numNodeManagers.
       */
      @java.lang.Override
      public int getNumNodeManagers() {
        return numNodeManagers_;
      }
      /**
       * <code>optional int32 num_node_managers = 1;</code>
       * @param value The numNodeManagers to set.
       * @return This builder for chaining.
       */
      public Builder setNumNodeManagers(int value) {
        numNodeManagers_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_node_managers = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumNodeManagers() {
        bitField0_ = (bitField0_ & ~0x00000001);
        numNodeManagers_ = 0;
        onChanged();
        return this;
      }

      private int numDecommissionedNms_;
      /**
       * <code>optional int32 num_decommissioned_nms = 2;</code>
       * @return Whether the numDecommissionedNms field is set.
       */
      @java.lang.Override
      public boolean hasNumDecommissionedNms() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 num_decommissioned_nms = 2;</code>
       * @return The numDecommissionedNms.
       */
      @java.lang.Override
      public int getNumDecommissionedNms() {
        return numDecommissionedNms_;
      }
      /**
       * <code>optional int32 num_decommissioned_nms = 2;</code>
       * @param value The numDecommissionedNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumDecommissionedNms(int value) {
        numDecommissionedNms_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_decommissioned_nms = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumDecommissionedNms() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numDecommissionedNms_ = 0;
        onChanged();
        return this;
      }

      private int numActiveNms_;
      /**
       * <code>optional int32 num_active_nms = 3;</code>
       * @return Whether the numActiveNms field is set.
       */
      @java.lang.Override
      public boolean hasNumActiveNms() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 num_active_nms = 3;</code>
       * @return The numActiveNms.
       */
      @java.lang.Override
      public int getNumActiveNms() {
        return numActiveNms_;
      }
      /**
       * <code>optional int32 num_active_nms = 3;</code>
       * @param value The numActiveNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumActiveNms(int value) {
        numActiveNms_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_active_nms = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumActiveNms() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numActiveNms_ = 0;
        onChanged();
        return this;
      }

      private int numLostNms_;
      /**
       * <code>optional int32 num_lost_nms = 4;</code>
       * @return Whether the numLostNms field is set.
       */
      @java.lang.Override
      public boolean hasNumLostNms() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 num_lost_nms = 4;</code>
       * @return The numLostNms.
       */
      @java.lang.Override
      public int getNumLostNms() {
        return numLostNms_;
      }
      /**
       * <code>optional int32 num_lost_nms = 4;</code>
       * @param value The numLostNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumLostNms(int value) {
        numLostNms_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_lost_nms = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumLostNms() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numLostNms_ = 0;
        onChanged();
        return this;
      }

      private int numUnhealthyNms_;
      /**
       * <code>optional int32 num_unhealthy_nms = 5;</code>
       * @return Whether the numUnhealthyNms field is set.
       */
      @java.lang.Override
      public boolean hasNumUnhealthyNms() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional int32 num_unhealthy_nms = 5;</code>
       * @return The numUnhealthyNms.
       */
      @java.lang.Override
      public int getNumUnhealthyNms() {
        return numUnhealthyNms_;
      }
      /**
       * <code>optional int32 num_unhealthy_nms = 5;</code>
       * @param value The numUnhealthyNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumUnhealthyNms(int value) {
        numUnhealthyNms_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_unhealthy_nms = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumUnhealthyNms() {
        bitField0_ = (bitField0_ & ~0x00000010);
        numUnhealthyNms_ = 0;
        onChanged();
        return this;
      }

      private int numRebootedNms_;
      /**
       * <code>optional int32 num_rebooted_nms = 6;</code>
       * @return Whether the numRebootedNms field is set.
       */
      @java.lang.Override
      public boolean hasNumRebootedNms() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int32 num_rebooted_nms = 6;</code>
       * @return The numRebootedNms.
       */
      @java.lang.Override
      public int getNumRebootedNms() {
        return numRebootedNms_;
      }
      /**
       * <code>optional int32 num_rebooted_nms = 6;</code>
       * @param value The numRebootedNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumRebootedNms(int value) {
        numRebootedNms_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_rebooted_nms = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumRebootedNms() {
        bitField0_ = (bitField0_ & ~0x00000020);
        numRebootedNms_ = 0;
        onChanged();
        return this;
      }

      private int numDecommissioningNms_;
      /**
       * <code>optional int32 num_decommissioning_nms = 7;</code>
       * @return Whether the numDecommissioningNms field is set.
       */
      @java.lang.Override
      public boolean hasNumDecommissioningNms() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int32 num_decommissioning_nms = 7;</code>
       * @return The numDecommissioningNms.
       */
      @java.lang.Override
      public int getNumDecommissioningNms() {
        return numDecommissioningNms_;
      }
      /**
       * <code>optional int32 num_decommissioning_nms = 7;</code>
       * @param value The numDecommissioningNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumDecommissioningNms(int value) {
        numDecommissioningNms_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_decommissioning_nms = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumDecommissioningNms() {
        bitField0_ = (bitField0_ & ~0x00000040);
        numDecommissioningNms_ = 0;
        onChanged();
        return this;
      }

      private int numShutdownNms_;
      /**
       * <code>optional int32 num_shutdown_nms = 8;</code>
       * @return Whether the numShutdownNms field is set.
       */
      @java.lang.Override
      public boolean hasNumShutdownNms() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int32 num_shutdown_nms = 8;</code>
       * @return The numShutdownNms.
       */
      @java.lang.Override
      public int getNumShutdownNms() {
        return numShutdownNms_;
      }
      /**
       * <code>optional int32 num_shutdown_nms = 8;</code>
       * @param value The numShutdownNms to set.
       * @return This builder for chaining.
       */
      public Builder setNumShutdownNms(int value) {
        numShutdownNms_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_shutdown_nms = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumShutdownNms() {
        bitField0_ = (bitField0_ & ~0x00000080);
        numShutdownNms_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.YarnClusterMetricsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.YarnClusterMetricsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<YarnClusterMetricsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<YarnClusterMetricsProto>() {
      @java.lang.Override
      public YarnClusterMetricsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<YarnClusterMetricsProto> parser() {
      return PARSER;
    }
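
    // Access sketch (illustrative; `bytes` is a hypothetical byte[]): prefer the
    // parser() accessor over the PARSER field, which is retained only for
    // backward compatibility and is therefore marked @java.lang.Deprecated:
    //
    //   YarnClusterMetricsProto m =
    //       YarnClusterMetricsProto.parser().parseFrom(bytes);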

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<YarnClusterMetricsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
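
  // Round-trip sketch (illustrative only; variable names are hypothetical):
  //
  //   YarnClusterMetricsProto metrics =
  //       YarnClusterMetricsProto.newBuilder()
  //           .setNumNodeManagers(100)
  //           .setNumActiveNms(95)
  //           .setNumLostNms(5)
  //           .build();
  //   byte[] bytes = metrics.toByteArray();
  //   YarnClusterMetricsProto copy = YarnClusterMetricsProto.parseFrom(bytes);
  //   assert copy.equals(metrics) && copy.hashCode() == metrics.hashCode();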

  public interface QueueStatisticsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueStatisticsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int64 numAppsSubmitted = 1;</code>
     * @return Whether the numAppsSubmitted field is set.
     */
    boolean hasNumAppsSubmitted();
    /**
     * <code>optional int64 numAppsSubmitted = 1;</code>
     * @return The numAppsSubmitted.
     */
    long getNumAppsSubmitted();

    /**
     * <code>optional int64 numAppsRunning = 2;</code>
     * @return Whether the numAppsRunning field is set.
     */
    boolean hasNumAppsRunning();
    /**
     * <code>optional int64 numAppsRunning = 2;</code>
     * @return The numAppsRunning.
     */
    long getNumAppsRunning();

    /**
     * <code>optional int64 numAppsPending = 3;</code>
     * @return Whether the numAppsPending field is set.
     */
    boolean hasNumAppsPending();
    /**
     * <code>optional int64 numAppsPending = 3;</code>
     * @return The numAppsPending.
     */
    long getNumAppsPending();

    /**
     * <code>optional int64 numAppsCompleted = 4;</code>
     * @return Whether the numAppsCompleted field is set.
     */
    boolean hasNumAppsCompleted();
    /**
     * <code>optional int64 numAppsCompleted = 4;</code>
     * @return The numAppsCompleted.
     */
    long getNumAppsCompleted();

    /**
     * <code>optional int64 numAppsKilled = 5;</code>
     * @return Whether the numAppsKilled field is set.
     */
    boolean hasNumAppsKilled();
    /**
     * <code>optional int64 numAppsKilled = 5;</code>
     * @return The numAppsKilled.
     */
    long getNumAppsKilled();

    /**
     * <code>optional int64 numAppsFailed = 6;</code>
     * @return Whether the numAppsFailed field is set.
     */
    boolean hasNumAppsFailed();
    /**
     * <code>optional int64 numAppsFailed = 6;</code>
     * @return The numAppsFailed.
     */
    long getNumAppsFailed();

    /**
     * <code>optional int64 numActiveUsers = 7;</code>
     * @return Whether the numActiveUsers field is set.
     */
    boolean hasNumActiveUsers();
    /**
     * <code>optional int64 numActiveUsers = 7;</code>
     * @return The numActiveUsers.
     */
    long getNumActiveUsers();

    /**
     * <code>optional int64 availableMemoryMB = 8;</code>
     * @return Whether the availableMemoryMB field is set.
     */
    boolean hasAvailableMemoryMB();
    /**
     * <code>optional int64 availableMemoryMB = 8;</code>
     * @return The availableMemoryMB.
     */
    long getAvailableMemoryMB();

    /**
     * <code>optional int64 allocatedMemoryMB = 9;</code>
     * @return Whether the allocatedMemoryMB field is set.
     */
    boolean hasAllocatedMemoryMB();
    /**
     * <code>optional int64 allocatedMemoryMB = 9;</code>
     * @return The allocatedMemoryMB.
     */
    long getAllocatedMemoryMB();

    /**
     * <code>optional int64 pendingMemoryMB = 10;</code>
     * @return Whether the pendingMemoryMB field is set.
     */
    boolean hasPendingMemoryMB();
    /**
     * <code>optional int64 pendingMemoryMB = 10;</code>
     * @return The pendingMemoryMB.
     */
    long getPendingMemoryMB();

    /**
     * <code>optional int64 reservedMemoryMB = 11;</code>
     * @return Whether the reservedMemoryMB field is set.
     */
    boolean hasReservedMemoryMB();
    /**
     * <code>optional int64 reservedMemoryMB = 11;</code>
     * @return The reservedMemoryMB.
     */
    long getReservedMemoryMB();

    /**
     * <code>optional int64 availableVCores = 12;</code>
     * @return Whether the availableVCores field is set.
     */
    boolean hasAvailableVCores();
    /**
     * <code>optional int64 availableVCores = 12;</code>
     * @return The availableVCores.
     */
    long getAvailableVCores();

    /**
     * <code>optional int64 allocatedVCores = 13;</code>
     * @return Whether the allocatedVCores field is set.
     */
    boolean hasAllocatedVCores();
    /**
     * <code>optional int64 allocatedVCores = 13;</code>
     * @return The allocatedVCores.
     */
    long getAllocatedVCores();

    /**
     * <code>optional int64 pendingVCores = 14;</code>
     * @return Whether the pendingVCores field is set.
     */
    boolean hasPendingVCores();
    /**
     * <code>optional int64 pendingVCores = 14;</code>
     * @return The pendingVCores.
     */
    long getPendingVCores();

    /**
     * <code>optional int64 reservedVCores = 15;</code>
     * @return Whether the reservedVCores field is set.
     */
    boolean hasReservedVCores();
    /**
     * <code>optional int64 reservedVCores = 15;</code>
     * @return The reservedVCores.
     */
    long getReservedVCores();

    /**
     * <code>optional int64 allocatedContainers = 16;</code>
     * @return Whether the allocatedContainers field is set.
     */
    boolean hasAllocatedContainers();
    /**
     * <code>optional int64 allocatedContainers = 16;</code>
     * @return The allocatedContainers.
     */
    long getAllocatedContainers();

    /**
     * <code>optional int64 pendingContainers = 17;</code>
     * @return Whether the pendingContainers field is set.
     */
    boolean hasPendingContainers();
    /**
     * <code>optional int64 pendingContainers = 17;</code>
     * @return The pendingContainers.
     */
    long getPendingContainers();

    /**
     * <code>optional int64 reservedContainers = 18;</code>
     * @return Whether the reservedContainers field is set.
     */
    boolean hasReservedContainers();
    /**
     * <code>optional int64 reservedContainers = 18;</code>
     * @return The reservedContainers.
     */
    long getReservedContainers();
  }
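
  // Presence sketch (illustrative; `stats` is hypothetical): every field in this
  // message is proto2 `optional`, so callers should check hasX() before trusting
  // getX(); an unset int64 getter returns 0L, which is indistinguishable from an
  // explicit zero:
  //
  //   long pending = stats.hasPendingMemoryMB()
  //       ? stats.getPendingMemoryMB()
  //       : -1L;  // sentinel for "not reported"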
  /**
   * Protobuf type {@code hadoop.yarn.QueueStatisticsProto}
   */
  public static final class QueueStatisticsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueStatisticsProto)
      QueueStatisticsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use QueueStatisticsProto.newBuilder() to construct.
    private QueueStatisticsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private QueueStatisticsProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new QueueStatisticsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder.class);
    }

    private int bitField0_;
    public static final int NUMAPPSSUBMITTED_FIELD_NUMBER = 1;
    private long numAppsSubmitted_ = 0L;
    /**
     * <code>optional int64 numAppsSubmitted = 1;</code>
     * @return Whether the numAppsSubmitted field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsSubmitted() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 numAppsSubmitted = 1;</code>
     * @return The numAppsSubmitted.
     */
    @java.lang.Override
    public long getNumAppsSubmitted() {
      return numAppsSubmitted_;
    }

    public static final int NUMAPPSRUNNING_FIELD_NUMBER = 2;
    private long numAppsRunning_ = 0L;
    /**
     * <code>optional int64 numAppsRunning = 2;</code>
     * @return Whether the numAppsRunning field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsRunning() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 numAppsRunning = 2;</code>
     * @return The numAppsRunning.
     */
    @java.lang.Override
    public long getNumAppsRunning() {
      return numAppsRunning_;
    }

    public static final int NUMAPPSPENDING_FIELD_NUMBER = 3;
    private long numAppsPending_ = 0L;
    /**
     * <code>optional int64 numAppsPending = 3;</code>
     * @return Whether the numAppsPending field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsPending() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 numAppsPending = 3;</code>
     * @return The numAppsPending.
     */
    @java.lang.Override
    public long getNumAppsPending() {
      return numAppsPending_;
    }

    public static final int NUMAPPSCOMPLETED_FIELD_NUMBER = 4;
    private long numAppsCompleted_ = 0L;
    /**
     * <code>optional int64 numAppsCompleted = 4;</code>
     * @return Whether the numAppsCompleted field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsCompleted() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 numAppsCompleted = 4;</code>
     * @return The numAppsCompleted.
     */
    @java.lang.Override
    public long getNumAppsCompleted() {
      return numAppsCompleted_;
    }

    public static final int NUMAPPSKILLED_FIELD_NUMBER = 5;
    private long numAppsKilled_ = 0L;
    /**
     * <code>optional int64 numAppsKilled = 5;</code>
     * @return Whether the numAppsKilled field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsKilled() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional int64 numAppsKilled = 5;</code>
     * @return The numAppsKilled.
     */
    @java.lang.Override
    public long getNumAppsKilled() {
      return numAppsKilled_;
    }

    public static final int NUMAPPSFAILED_FIELD_NUMBER = 6;
    private long numAppsFailed_ = 0L;
    /**
     * <code>optional int64 numAppsFailed = 6;</code>
     * @return Whether the numAppsFailed field is set.
     */
    @java.lang.Override
    public boolean hasNumAppsFailed() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int64 numAppsFailed = 6;</code>
     * @return The numAppsFailed.
     */
    @java.lang.Override
    public long getNumAppsFailed() {
      return numAppsFailed_;
    }

    public static final int NUMACTIVEUSERS_FIELD_NUMBER = 7;
    private long numActiveUsers_ = 0L;
    /**
     * <code>optional int64 numActiveUsers = 7;</code>
     * @return Whether the numActiveUsers field is set.
     */
    @java.lang.Override
    public boolean hasNumActiveUsers() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional int64 numActiveUsers = 7;</code>
     * @return The numActiveUsers.
     */
    @java.lang.Override
    public long getNumActiveUsers() {
      return numActiveUsers_;
    }

    public static final int AVAILABLEMEMORYMB_FIELD_NUMBER = 8;
    private long availableMemoryMB_ = 0L;
    /**
     * <code>optional int64 availableMemoryMB = 8;</code>
     * @return Whether the availableMemoryMB field is set.
     */
    @java.lang.Override
    public boolean hasAvailableMemoryMB() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional int64 availableMemoryMB = 8;</code>
     * @return The availableMemoryMB.
     */
    @java.lang.Override
    public long getAvailableMemoryMB() {
      return availableMemoryMB_;
    }

    public static final int ALLOCATEDMEMORYMB_FIELD_NUMBER = 9;
    private long allocatedMemoryMB_ = 0L;
    /**
     * <code>optional int64 allocatedMemoryMB = 9;</code>
     * @return Whether the allocatedMemoryMB field is set.
     */
    @java.lang.Override
    public boolean hasAllocatedMemoryMB() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional int64 allocatedMemoryMB = 9;</code>
     * @return The allocatedMemoryMB.
     */
    @java.lang.Override
    public long getAllocatedMemoryMB() {
      return allocatedMemoryMB_;
    }

    public static final int PENDINGMEMORYMB_FIELD_NUMBER = 10;
    private long pendingMemoryMB_ = 0L;
    /**
     * <code>optional int64 pendingMemoryMB = 10;</code>
     * @return Whether the pendingMemoryMB field is set.
     */
    @java.lang.Override
    public boolean hasPendingMemoryMB() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional int64 pendingMemoryMB = 10;</code>
     * @return The pendingMemoryMB.
     */
    @java.lang.Override
    public long getPendingMemoryMB() {
      return pendingMemoryMB_;
    }

    public static final int RESERVEDMEMORYMB_FIELD_NUMBER = 11;
    private long reservedMemoryMB_ = 0L;
    /**
     * <code>optional int64 reservedMemoryMB = 11;</code>
     * @return Whether the reservedMemoryMB field is set.
     */
    @java.lang.Override
    public boolean hasReservedMemoryMB() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional int64 reservedMemoryMB = 11;</code>
     * @return The reservedMemoryMB.
     */
    @java.lang.Override
    public long getReservedMemoryMB() {
      return reservedMemoryMB_;
    }

    public static final int AVAILABLEVCORES_FIELD_NUMBER = 12;
    private long availableVCores_ = 0L;
    /**
     * <code>optional int64 availableVCores = 12;</code>
     * @return Whether the availableVCores field is set.
     */
    @java.lang.Override
    public boolean hasAvailableVCores() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional int64 availableVCores = 12;</code>
     * @return The availableVCores.
     */
    @java.lang.Override
    public long getAvailableVCores() {
      return availableVCores_;
    }

    public static final int ALLOCATEDVCORES_FIELD_NUMBER = 13;
    private long allocatedVCores_ = 0L;
    /**
     * <code>optional int64 allocatedVCores = 13;</code>
     * @return Whether the allocatedVCores field is set.
     */
    @java.lang.Override
    public boolean hasAllocatedVCores() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional int64 allocatedVCores = 13;</code>
     * @return The allocatedVCores.
     */
    @java.lang.Override
    public long getAllocatedVCores() {
      return allocatedVCores_;
    }

    public static final int PENDINGVCORES_FIELD_NUMBER = 14;
    private long pendingVCores_ = 0L;
    /**
     * <code>optional int64 pendingVCores = 14;</code>
     * @return Whether the pendingVCores field is set.
     */
    @java.lang.Override
    public boolean hasPendingVCores() {
      return ((bitField0_ & 0x00002000) != 0);
    }
    /**
     * <code>optional int64 pendingVCores = 14;</code>
     * @return The pendingVCores.
     */
    @java.lang.Override
    public long getPendingVCores() {
      return pendingVCores_;
    }

    public static final int RESERVEDVCORES_FIELD_NUMBER = 15;
    private long reservedVCores_ = 0L;
    /**
     * <code>optional int64 reservedVCores = 15;</code>
     * @return Whether the reservedVCores field is set.
     */
    @java.lang.Override
    public boolean hasReservedVCores() {
      return ((bitField0_ & 0x00004000) != 0);
    }
    /**
     * <code>optional int64 reservedVCores = 15;</code>
     * @return The reservedVCores.
     */
    @java.lang.Override
    public long getReservedVCores() {
      return reservedVCores_;
    }

    public static final int ALLOCATEDCONTAINERS_FIELD_NUMBER = 16;
    private long allocatedContainers_ = 0L;
    /**
     * <code>optional int64 allocatedContainers = 16;</code>
     * @return Whether the allocatedContainers field is set.
     */
    @java.lang.Override
    public boolean hasAllocatedContainers() {
      return ((bitField0_ & 0x00008000) != 0);
    }
    /**
     * <code>optional int64 allocatedContainers = 16;</code>
     * @return The allocatedContainers.
     */
    @java.lang.Override
    public long getAllocatedContainers() {
      return allocatedContainers_;
    }

    public static final int PENDINGCONTAINERS_FIELD_NUMBER = 17;
    private long pendingContainers_ = 0L;
    /**
     * <code>optional int64 pendingContainers = 17;</code>
     * @return Whether the pendingContainers field is set.
     */
    @java.lang.Override
    public boolean hasPendingContainers() {
      return ((bitField0_ & 0x00010000) != 0);
    }
    /**
     * <code>optional int64 pendingContainers = 17;</code>
     * @return The pendingContainers.
     */
    @java.lang.Override
    public long getPendingContainers() {
      return pendingContainers_;
    }

    public static final int RESERVEDCONTAINERS_FIELD_NUMBER = 18;
    private long reservedContainers_ = 0L;
    /**
     * <code>optional int64 reservedContainers = 18;</code>
     * @return Whether the reservedContainers field is set.
     */
    @java.lang.Override
    public boolean hasReservedContainers() {
      return ((bitField0_ & 0x00020000) != 0);
    }
    /**
     * <code>optional int64 reservedContainers = 18;</code>
     * @return The reservedContainers.
     */
    @java.lang.Override
    public long getReservedContainers() {
      return reservedContainers_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(1, numAppsSubmitted_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, numAppsRunning_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, numAppsPending_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(4, numAppsCompleted_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(5, numAppsKilled_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(6, numAppsFailed_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeInt64(7, numActiveUsers_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeInt64(8, availableMemoryMB_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeInt64(9, allocatedMemoryMB_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeInt64(10, pendingMemoryMB_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        output.writeInt64(11, reservedMemoryMB_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeInt64(12, availableVCores_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        output.writeInt64(13, allocatedVCores_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        output.writeInt64(14, pendingVCores_);
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        output.writeInt64(15, reservedVCores_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        output.writeInt64(16, allocatedContainers_);
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        output.writeInt64(17, pendingContainers_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        output.writeInt64(18, reservedContainers_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(1, numAppsSubmitted_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, numAppsRunning_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, numAppsPending_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(4, numAppsCompleted_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, numAppsKilled_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, numAppsFailed_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, numActiveUsers_);
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(8, availableMemoryMB_);
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(9, allocatedMemoryMB_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(10, pendingMemoryMB_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(11, reservedMemoryMB_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(12, availableVCores_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(13, allocatedVCores_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(14, pendingVCores_);
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(15, reservedVCores_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(16, allocatedContainers_);
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(17, pendingContainers_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(18, reservedContainers_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
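
    // Size note (illustrative): the int64 fields follow the same varint rules as
    // above; small non-negative counters cost two bytes each for fields 1..15
    // (1 tag byte + 1 varint byte, e.g. computeInt64Size(1, 5L) == 2), fields
    // 16..18 need a 2-byte tag, and negative int64 values always take 10 varint
    // bytes.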

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto) obj;

      if (hasNumAppsSubmitted() != other.hasNumAppsSubmitted()) return false;
      if (hasNumAppsSubmitted()) {
        if (getNumAppsSubmitted()
            != other.getNumAppsSubmitted()) return false;
      }
      if (hasNumAppsRunning() != other.hasNumAppsRunning()) return false;
      if (hasNumAppsRunning()) {
        if (getNumAppsRunning()
            != other.getNumAppsRunning()) return false;
      }
      if (hasNumAppsPending() != other.hasNumAppsPending()) return false;
      if (hasNumAppsPending()) {
        if (getNumAppsPending()
            != other.getNumAppsPending()) return false;
      }
      if (hasNumAppsCompleted() != other.hasNumAppsCompleted()) return false;
      if (hasNumAppsCompleted()) {
        if (getNumAppsCompleted()
            != other.getNumAppsCompleted()) return false;
      }
      if (hasNumAppsKilled() != other.hasNumAppsKilled()) return false;
      if (hasNumAppsKilled()) {
        if (getNumAppsKilled()
            != other.getNumAppsKilled()) return false;
      }
      if (hasNumAppsFailed() != other.hasNumAppsFailed()) return false;
      if (hasNumAppsFailed()) {
        if (getNumAppsFailed()
            != other.getNumAppsFailed()) return false;
      }
      if (hasNumActiveUsers() != other.hasNumActiveUsers()) return false;
      if (hasNumActiveUsers()) {
        if (getNumActiveUsers()
            != other.getNumActiveUsers()) return false;
      }
      if (hasAvailableMemoryMB() != other.hasAvailableMemoryMB()) return false;
      if (hasAvailableMemoryMB()) {
        if (getAvailableMemoryMB()
            != other.getAvailableMemoryMB()) return false;
      }
      if (hasAllocatedMemoryMB() != other.hasAllocatedMemoryMB()) return false;
      if (hasAllocatedMemoryMB()) {
        if (getAllocatedMemoryMB()
            != other.getAllocatedMemoryMB()) return false;
      }
      if (hasPendingMemoryMB() != other.hasPendingMemoryMB()) return false;
      if (hasPendingMemoryMB()) {
        if (getPendingMemoryMB()
            != other.getPendingMemoryMB()) return false;
      }
      if (hasReservedMemoryMB() != other.hasReservedMemoryMB()) return false;
      if (hasReservedMemoryMB()) {
        if (getReservedMemoryMB()
            != other.getReservedMemoryMB()) return false;
      }
      if (hasAvailableVCores() != other.hasAvailableVCores()) return false;
      if (hasAvailableVCores()) {
        if (getAvailableVCores()
            != other.getAvailableVCores()) return false;
      }
      if (hasAllocatedVCores() != other.hasAllocatedVCores()) return false;
      if (hasAllocatedVCores()) {
        if (getAllocatedVCores()
            != other.getAllocatedVCores()) return false;
      }
      if (hasPendingVCores() != other.hasPendingVCores()) return false;
      if (hasPendingVCores()) {
        if (getPendingVCores()
            != other.getPendingVCores()) return false;
      }
      if (hasReservedVCores() != other.hasReservedVCores()) return false;
      if (hasReservedVCores()) {
        if (getReservedVCores()
            != other.getReservedVCores()) return false;
      }
      if (hasAllocatedContainers() != other.hasAllocatedContainers()) return false;
      if (hasAllocatedContainers()) {
        if (getAllocatedContainers()
            != other.getAllocatedContainers()) return false;
      }
      if (hasPendingContainers() != other.hasPendingContainers()) return false;
      if (hasPendingContainers()) {
        if (getPendingContainers()
            != other.getPendingContainers()) return false;
      }
      if (hasReservedContainers() != other.hasReservedContainers()) return false;
      if (hasReservedContainers()) {
        if (getReservedContainers()
            != other.getReservedContainers()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasNumAppsSubmitted()) {
        hash = (37 * hash) + NUMAPPSSUBMITTED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsSubmitted());
      }
      if (hasNumAppsRunning()) {
        hash = (37 * hash) + NUMAPPSRUNNING_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsRunning());
      }
      if (hasNumAppsPending()) {
        hash = (37 * hash) + NUMAPPSPENDING_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsPending());
      }
      if (hasNumAppsCompleted()) {
        hash = (37 * hash) + NUMAPPSCOMPLETED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsCompleted());
      }
      if (hasNumAppsKilled()) {
        hash = (37 * hash) + NUMAPPSKILLED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsKilled());
      }
      if (hasNumAppsFailed()) {
        hash = (37 * hash) + NUMAPPSFAILED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumAppsFailed());
      }
      if (hasNumActiveUsers()) {
        hash = (37 * hash) + NUMACTIVEUSERS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getNumActiveUsers());
      }
      if (hasAvailableMemoryMB()) {
        hash = (37 * hash) + AVAILABLEMEMORYMB_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAvailableMemoryMB());
      }
      if (hasAllocatedMemoryMB()) {
        hash = (37 * hash) + ALLOCATEDMEMORYMB_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocatedMemoryMB());
      }
      if (hasPendingMemoryMB()) {
        hash = (37 * hash) + PENDINGMEMORYMB_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getPendingMemoryMB());
      }
      if (hasReservedMemoryMB()) {
        hash = (37 * hash) + RESERVEDMEMORYMB_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getReservedMemoryMB());
      }
      if (hasAvailableVCores()) {
        hash = (37 * hash) + AVAILABLEVCORES_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAvailableVCores());
      }
      if (hasAllocatedVCores()) {
        hash = (37 * hash) + ALLOCATEDVCORES_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocatedVCores());
      }
      if (hasPendingVCores()) {
        hash = (37 * hash) + PENDINGVCORES_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getPendingVCores());
      }
      if (hasReservedVCores()) {
        hash = (37 * hash) + RESERVEDVCORES_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getReservedVCores());
      }
      if (hasAllocatedContainers()) {
        hash = (37 * hash) + ALLOCATEDCONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAllocatedContainers());
      }
      if (hasPendingContainers()) {
        hash = (37 * hash) + PENDINGCONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getPendingContainers());
      }
      if (hasReservedContainers()) {
        hash = (37 * hash) + RESERVEDCONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getReservedContainers());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
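    // Commentary (added; not emitted by protoc): hashCode() mixes each set
    // field's number and value with the primes 37 and 53, starting from the
    // descriptor hash, so messages with different sets of populated fields
    // hash differently. Internal.hashLong folds the high 32 bits of a long
    // into the low 32 before truncating to int. The result is memoized, with
    // 0 doubling as the "not yet computed" sentinel.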

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
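    // Illustrative sketch (added commentary, not protoc output): a minimal
    // round trip through the parse methods above. setNumAppsRunning and
    // setAllocatedMemoryMB are setters from the Builder below; toByteArray()
    // comes from the standard protobuf Message surface.
    //
    //   QueueStatisticsProto stats = QueueStatisticsProto.newBuilder()
    //       .setNumAppsRunning(3L)
    //       .setAllocatedMemoryMB(8192L)
    //       .build();
    //   byte[] wire = stats.toByteArray();
    //   QueueStatisticsProto parsed = QueueStatisticsProto.parseFrom(wire);
    //   assert parsed.equals(stats);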

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueStatisticsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueStatisticsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        numAppsSubmitted_ = 0L;
        numAppsRunning_ = 0L;
        numAppsPending_ = 0L;
        numAppsCompleted_ = 0L;
        numAppsKilled_ = 0L;
        numAppsFailed_ = 0L;
        numActiveUsers_ = 0L;
        availableMemoryMB_ = 0L;
        allocatedMemoryMB_ = 0L;
        pendingMemoryMB_ = 0L;
        reservedMemoryMB_ = 0L;
        availableVCores_ = 0L;
        allocatedVCores_ = 0L;
        pendingVCores_ = 0L;
        reservedVCores_ = 0L;
        allocatedContainers_ = 0L;
        pendingContainers_ = 0L;
        reservedContainers_ = 0L;
        return this;
      }
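      // Commentary (added; not emitted by protoc): clear() resets every field
      // to its proto default (0L for these int64 fields) and zeroes
      // bitField0_, dropping all presence information in one step.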

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.numAppsSubmitted_ = numAppsSubmitted_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.numAppsRunning_ = numAppsRunning_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.numAppsPending_ = numAppsPending_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.numAppsCompleted_ = numAppsCompleted_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.numAppsKilled_ = numAppsKilled_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.numAppsFailed_ = numAppsFailed_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.numActiveUsers_ = numActiveUsers_;
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.availableMemoryMB_ = availableMemoryMB_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.allocatedMemoryMB_ = allocatedMemoryMB_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.pendingMemoryMB_ = pendingMemoryMB_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.reservedMemoryMB_ = reservedMemoryMB_;
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00000800) != 0)) {
          result.availableVCores_ = availableVCores_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.allocatedVCores_ = allocatedVCores_;
          to_bitField0_ |= 0x00001000;
        }
        if (((from_bitField0_ & 0x00002000) != 0)) {
          result.pendingVCores_ = pendingVCores_;
          to_bitField0_ |= 0x00002000;
        }
        if (((from_bitField0_ & 0x00004000) != 0)) {
          result.reservedVCores_ = reservedVCores_;
          to_bitField0_ |= 0x00004000;
        }
        if (((from_bitField0_ & 0x00008000) != 0)) {
          result.allocatedContainers_ = allocatedContainers_;
          to_bitField0_ |= 0x00008000;
        }
        if (((from_bitField0_ & 0x00010000) != 0)) {
          result.pendingContainers_ = pendingContainers_;
          to_bitField0_ |= 0x00010000;
        }
        if (((from_bitField0_ & 0x00020000) != 0)) {
          result.reservedContainers_ = reservedContainers_;
          to_bitField0_ |= 0x00020000;
        }
        result.bitField0_ |= to_bitField0_;
      }
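      // Commentary (added; not emitted by protoc): buildPartial0 copies only
      // the fields whose presence bits are set in the builder, accumulates
      // the matching bits in to_bitField0_, and ORs them into the result's
      // bitField0_ in a single write at the end.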

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance()) return this;
        if (other.hasNumAppsSubmitted()) {
          setNumAppsSubmitted(other.getNumAppsSubmitted());
        }
        if (other.hasNumAppsRunning()) {
          setNumAppsRunning(other.getNumAppsRunning());
        }
        if (other.hasNumAppsPending()) {
          setNumAppsPending(other.getNumAppsPending());
        }
        if (other.hasNumAppsCompleted()) {
          setNumAppsCompleted(other.getNumAppsCompleted());
        }
        if (other.hasNumAppsKilled()) {
          setNumAppsKilled(other.getNumAppsKilled());
        }
        if (other.hasNumAppsFailed()) {
          setNumAppsFailed(other.getNumAppsFailed());
        }
        if (other.hasNumActiveUsers()) {
          setNumActiveUsers(other.getNumActiveUsers());
        }
        if (other.hasAvailableMemoryMB()) {
          setAvailableMemoryMB(other.getAvailableMemoryMB());
        }
        if (other.hasAllocatedMemoryMB()) {
          setAllocatedMemoryMB(other.getAllocatedMemoryMB());
        }
        if (other.hasPendingMemoryMB()) {
          setPendingMemoryMB(other.getPendingMemoryMB());
        }
        if (other.hasReservedMemoryMB()) {
          setReservedMemoryMB(other.getReservedMemoryMB());
        }
        if (other.hasAvailableVCores()) {
          setAvailableVCores(other.getAvailableVCores());
        }
        if (other.hasAllocatedVCores()) {
          setAllocatedVCores(other.getAllocatedVCores());
        }
        if (other.hasPendingVCores()) {
          setPendingVCores(other.getPendingVCores());
        }
        if (other.hasReservedVCores()) {
          setReservedVCores(other.getReservedVCores());
        }
        if (other.hasAllocatedContainers()) {
          setAllocatedContainers(other.getAllocatedContainers());
        }
        if (other.hasPendingContainers()) {
          setPendingContainers(other.getPendingContainers());
        }
        if (other.hasReservedContainers()) {
          setReservedContainers(other.getReservedContainers());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
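      // Commentary (added; not emitted by protoc): merging is "last writer
      // wins" for these scalar fields; any field set on `other` overwrites
      // the builder's current value, while fields unset on `other` are left
      // alone.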

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                numAppsSubmitted_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                numAppsRunning_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                numAppsPending_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                numAppsCompleted_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 40: {
                numAppsKilled_ = input.readInt64();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              case 48: {
                numAppsFailed_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                numActiveUsers_ = input.readInt64();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 64: {
                availableMemoryMB_ = input.readInt64();
                bitField0_ |= 0x00000080;
                break;
              } // case 64
              case 72: {
                allocatedMemoryMB_ = input.readInt64();
                bitField0_ |= 0x00000100;
                break;
              } // case 72
              case 80: {
                pendingMemoryMB_ = input.readInt64();
                bitField0_ |= 0x00000200;
                break;
              } // case 80
              case 88: {
                reservedMemoryMB_ = input.readInt64();
                bitField0_ |= 0x00000400;
                break;
              } // case 88
              case 96: {
                availableVCores_ = input.readInt64();
                bitField0_ |= 0x00000800;
                break;
              } // case 96
              case 104: {
                allocatedVCores_ = input.readInt64();
                bitField0_ |= 0x00001000;
                break;
              } // case 104
              case 112: {
                pendingVCores_ = input.readInt64();
                bitField0_ |= 0x00002000;
                break;
              } // case 112
              case 120: {
                reservedVCores_ = input.readInt64();
                bitField0_ |= 0x00004000;
                break;
              } // case 120
              case 128: {
                allocatedContainers_ = input.readInt64();
                bitField0_ |= 0x00008000;
                break;
              } // case 128
              case 136: {
                pendingContainers_ = input.readInt64();
                bitField0_ |= 0x00010000;
                break;
              } // case 136
              case 144: {
                reservedContainers_ = input.readInt64();
                bitField0_ |= 0x00020000;
                break;
              } // case 144
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
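      // Commentary (added; not emitted by protoc): each case above is a wire
      // tag, computed as (fieldNumber << 3) | wireType. All eighteen fields
      // are int64 varints (wire type 0), so field 1 reads from tag 8, field 2
      // from tag 16, and so on up to field 18 at tag 144. Tag 0 means end of
      // input; unrecognized tags are preserved through parseUnknownField.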
      private int bitField0_;

      private long numAppsSubmitted_;
      /**
       * <code>optional int64 numAppsSubmitted = 1;</code>
       * @return Whether the numAppsSubmitted field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsSubmitted() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int64 numAppsSubmitted = 1;</code>
       * @return The numAppsSubmitted.
       */
      @java.lang.Override
      public long getNumAppsSubmitted() {
        return numAppsSubmitted_;
      }
      /**
       * <code>optional int64 numAppsSubmitted = 1;</code>
       * @param value The numAppsSubmitted to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsSubmitted(long value) {

        numAppsSubmitted_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsSubmitted = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsSubmitted() {
        bitField0_ = (bitField0_ & ~0x00000001);
        numAppsSubmitted_ = 0L;
        onChanged();
        return this;
      }
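      // Commentary (added; not emitted by protoc): the has/get/set/clear
      // quartet above repeats for each of the remaining seventeen fields,
      // each guarded by its own bit in bitField0_ (0x00000002 for field 2
      // through 0x00020000 for field 18).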

      private long numAppsRunning_;
      /**
       * <code>optional int64 numAppsRunning = 2;</code>
       * @return Whether the numAppsRunning field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsRunning() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 numAppsRunning = 2;</code>
       * @return The numAppsRunning.
       */
      @java.lang.Override
      public long getNumAppsRunning() {
        return numAppsRunning_;
      }
      /**
       * <code>optional int64 numAppsRunning = 2;</code>
       * @param value The numAppsRunning to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsRunning(long value) {

        numAppsRunning_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsRunning = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsRunning() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numAppsRunning_ = 0L;
        onChanged();
        return this;
      }

      private long numAppsPending_;
      /**
       * <code>optional int64 numAppsPending = 3;</code>
       * @return Whether the numAppsPending field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsPending() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 numAppsPending = 3;</code>
       * @return The numAppsPending.
       */
      @java.lang.Override
      public long getNumAppsPending() {
        return numAppsPending_;
      }
      /**
       * <code>optional int64 numAppsPending = 3;</code>
       * @param value The numAppsPending to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsPending(long value) {

        numAppsPending_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsPending = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsPending() {
        bitField0_ = (bitField0_ & ~0x00000004);
        numAppsPending_ = 0L;
        onChanged();
        return this;
      }

      private long numAppsCompleted_;
      /**
       * <code>optional int64 numAppsCompleted = 4;</code>
       * @return Whether the numAppsCompleted field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsCompleted() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 numAppsCompleted = 4;</code>
       * @return The numAppsCompleted.
       */
      @java.lang.Override
      public long getNumAppsCompleted() {
        return numAppsCompleted_;
      }
      /**
       * <code>optional int64 numAppsCompleted = 4;</code>
       * @param value The numAppsCompleted to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsCompleted(long value) {

        numAppsCompleted_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsCompleted = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsCompleted() {
        bitField0_ = (bitField0_ & ~0x00000008);
        numAppsCompleted_ = 0L;
        onChanged();
        return this;
      }

      private long numAppsKilled_;
      /**
       * <code>optional int64 numAppsKilled = 5;</code>
       * @return Whether the numAppsKilled field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsKilled() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional int64 numAppsKilled = 5;</code>
       * @return The numAppsKilled.
       */
      @java.lang.Override
      public long getNumAppsKilled() {
        return numAppsKilled_;
      }
      /**
       * <code>optional int64 numAppsKilled = 5;</code>
       * @param value The numAppsKilled to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsKilled(long value) {

        numAppsKilled_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsKilled = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsKilled() {
        bitField0_ = (bitField0_ & ~0x00000010);
        numAppsKilled_ = 0L;
        onChanged();
        return this;
      }

      private long numAppsFailed_;
      /**
       * <code>optional int64 numAppsFailed = 6;</code>
       * @return Whether the numAppsFailed field is set.
       */
      @java.lang.Override
      public boolean hasNumAppsFailed() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int64 numAppsFailed = 6;</code>
       * @return The numAppsFailed.
       */
      @java.lang.Override
      public long getNumAppsFailed() {
        return numAppsFailed_;
      }
      /**
       * <code>optional int64 numAppsFailed = 6;</code>
       * @param value The numAppsFailed to set.
       * @return This builder for chaining.
       */
      public Builder setNumAppsFailed(long value) {

        numAppsFailed_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numAppsFailed = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumAppsFailed() {
        bitField0_ = (bitField0_ & ~0x00000020);
        numAppsFailed_ = 0L;
        onChanged();
        return this;
      }

      private long numActiveUsers_;
      /**
       * <code>optional int64 numActiveUsers = 7;</code>
       * @return Whether the numActiveUsers field is set.
       */
      @java.lang.Override
      public boolean hasNumActiveUsers() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int64 numActiveUsers = 7;</code>
       * @return The numActiveUsers.
       */
      @java.lang.Override
      public long getNumActiveUsers() {
        return numActiveUsers_;
      }
      /**
       * <code>optional int64 numActiveUsers = 7;</code>
       * @param value The numActiveUsers to set.
       * @return This builder for chaining.
       */
      public Builder setNumActiveUsers(long value) {

        numActiveUsers_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 numActiveUsers = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearNumActiveUsers() {
        bitField0_ = (bitField0_ & ~0x00000040);
        numActiveUsers_ = 0L;
        onChanged();
        return this;
      }

      private long availableMemoryMB_;
      /**
       * <code>optional int64 availableMemoryMB = 8;</code>
       * @return Whether the availableMemoryMB field is set.
       */
      @java.lang.Override
      public boolean hasAvailableMemoryMB() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional int64 availableMemoryMB = 8;</code>
       * @return The availableMemoryMB.
       */
      @java.lang.Override
      public long getAvailableMemoryMB() {
        return availableMemoryMB_;
      }
      /**
       * <code>optional int64 availableMemoryMB = 8;</code>
       * @param value The availableMemoryMB to set.
       * @return This builder for chaining.
       */
      public Builder setAvailableMemoryMB(long value) {

        availableMemoryMB_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 availableMemoryMB = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearAvailableMemoryMB() {
        bitField0_ = (bitField0_ & ~0x00000080);
        availableMemoryMB_ = 0L;
        onChanged();
        return this;
      }

      private long allocatedMemoryMB_;
      /**
       * <code>optional int64 allocatedMemoryMB = 9;</code>
       * @return Whether the allocatedMemoryMB field is set.
       */
      @java.lang.Override
      public boolean hasAllocatedMemoryMB() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional int64 allocatedMemoryMB = 9;</code>
       * @return The allocatedMemoryMB.
       */
      @java.lang.Override
      public long getAllocatedMemoryMB() {
        return allocatedMemoryMB_;
      }
      /**
       * <code>optional int64 allocatedMemoryMB = 9;</code>
       * @param value The allocatedMemoryMB to set.
       * @return This builder for chaining.
       */
      public Builder setAllocatedMemoryMB(long value) {

        allocatedMemoryMB_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocatedMemoryMB = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocatedMemoryMB() {
        bitField0_ = (bitField0_ & ~0x00000100);
        allocatedMemoryMB_ = 0L;
        onChanged();
        return this;
      }

      private long pendingMemoryMB_;
      /**
       * <code>optional int64 pendingMemoryMB = 10;</code>
       * @return Whether the pendingMemoryMB field is set.
       */
      @java.lang.Override
      public boolean hasPendingMemoryMB() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional int64 pendingMemoryMB = 10;</code>
       * @return The pendingMemoryMB.
       */
      @java.lang.Override
      public long getPendingMemoryMB() {
        return pendingMemoryMB_;
      }
      /**
       * <code>optional int64 pendingMemoryMB = 10;</code>
       * @param value The pendingMemoryMB to set.
       * @return This builder for chaining.
       */
      public Builder setPendingMemoryMB(long value) {

        pendingMemoryMB_ = value;
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 pendingMemoryMB = 10;</code>
       * @return This builder for chaining.
       */
      public Builder clearPendingMemoryMB() {
        bitField0_ = (bitField0_ & ~0x00000200);
        pendingMemoryMB_ = 0L;
        onChanged();
        return this;
      }

      private long reservedMemoryMB_;
      /**
       * <code>optional int64 reservedMemoryMB = 11;</code>
       * @return Whether the reservedMemoryMB field is set.
       */
      @java.lang.Override
      public boolean hasReservedMemoryMB() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional int64 reservedMemoryMB = 11;</code>
       * @return The reservedMemoryMB.
       */
      @java.lang.Override
      public long getReservedMemoryMB() {
        return reservedMemoryMB_;
      }
      /**
       * <code>optional int64 reservedMemoryMB = 11;</code>
       * @param value The reservedMemoryMB to set.
       * @return This builder for chaining.
       */
      public Builder setReservedMemoryMB(long value) {

        reservedMemoryMB_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 reservedMemoryMB = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservedMemoryMB() {
        bitField0_ = (bitField0_ & ~0x00000400);
        reservedMemoryMB_ = 0L;
        onChanged();
        return this;
      }

      private long availableVCores_;
      /**
       * <code>optional int64 availableVCores = 12;</code>
       * @return Whether the availableVCores field is set.
       */
      @java.lang.Override
      public boolean hasAvailableVCores() {
        return ((bitField0_ & 0x00000800) != 0);
      }
      /**
       * <code>optional int64 availableVCores = 12;</code>
       * @return The availableVCores.
       */
      @java.lang.Override
      public long getAvailableVCores() {
        return availableVCores_;
      }
      /**
       * <code>optional int64 availableVCores = 12;</code>
       * @param value The availableVCores to set.
       * @return This builder for chaining.
       */
      public Builder setAvailableVCores(long value) {

        availableVCores_ = value;
        bitField0_ |= 0x00000800;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 availableVCores = 12;</code>
       * @return This builder for chaining.
       */
      public Builder clearAvailableVCores() {
        bitField0_ = (bitField0_ & ~0x00000800);
        availableVCores_ = 0L;
        onChanged();
        return this;
      }

      private long allocatedVCores_;
      /**
       * <code>optional int64 allocatedVCores = 13;</code>
       * @return Whether the allocatedVCores field is set.
       */
      @java.lang.Override
      public boolean hasAllocatedVCores() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional int64 allocatedVCores = 13;</code>
       * @return The allocatedVCores.
       */
      @java.lang.Override
      public long getAllocatedVCores() {
        return allocatedVCores_;
      }
      /**
       * <code>optional int64 allocatedVCores = 13;</code>
       * @param value The allocatedVCores to set.
       * @return This builder for chaining.
       */
      public Builder setAllocatedVCores(long value) {

        allocatedVCores_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocatedVCores = 13;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocatedVCores() {
        bitField0_ = (bitField0_ & ~0x00001000);
        allocatedVCores_ = 0L;
        onChanged();
        return this;
      }

      private long pendingVCores_;
      /**
       * <code>optional int64 pendingVCores = 14;</code>
       * @return Whether the pendingVCores field is set.
       */
      @java.lang.Override
      public boolean hasPendingVCores() {
        return ((bitField0_ & 0x00002000) != 0);
      }
      /**
       * <code>optional int64 pendingVCores = 14;</code>
       * @return The pendingVCores.
       */
      @java.lang.Override
      public long getPendingVCores() {
        return pendingVCores_;
      }
      /**
       * <code>optional int64 pendingVCores = 14;</code>
       * @param value The pendingVCores to set.
       * @return This builder for chaining.
       */
      public Builder setPendingVCores(long value) {

        pendingVCores_ = value;
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 pendingVCores = 14;</code>
       * @return This builder for chaining.
       */
      public Builder clearPendingVCores() {
        bitField0_ = (bitField0_ & ~0x00002000);
        pendingVCores_ = 0L;
        onChanged();
        return this;
      }

      private long reservedVCores_;
      /**
       * <code>optional int64 reservedVCores = 15;</code>
       * @return Whether the reservedVCores field is set.
       */
      @java.lang.Override
      public boolean hasReservedVCores() {
        return ((bitField0_ & 0x00004000) != 0);
      }
      /**
       * <code>optional int64 reservedVCores = 15;</code>
       * @return The reservedVCores.
       */
      @java.lang.Override
      public long getReservedVCores() {
        return reservedVCores_;
      }
      /**
       * <code>optional int64 reservedVCores = 15;</code>
       * @param value The reservedVCores to set.
       * @return This builder for chaining.
       */
      public Builder setReservedVCores(long value) {

        reservedVCores_ = value;
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 reservedVCores = 15;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservedVCores() {
        bitField0_ = (bitField0_ & ~0x00004000);
        reservedVCores_ = 0L;
        onChanged();
        return this;
      }

      private long allocatedContainers_;
      /**
       * <code>optional int64 allocatedContainers = 16;</code>
       * @return Whether the allocatedContainers field is set.
       */
      @java.lang.Override
      public boolean hasAllocatedContainers() {
        return ((bitField0_ & 0x00008000) != 0);
      }
      /**
       * <code>optional int64 allocatedContainers = 16;</code>
       * @return The allocatedContainers.
       */
      @java.lang.Override
      public long getAllocatedContainers() {
        return allocatedContainers_;
      }
      /**
       * <code>optional int64 allocatedContainers = 16;</code>
       * @param value The allocatedContainers to set.
       * @return This builder for chaining.
       */
      public Builder setAllocatedContainers(long value) {

        allocatedContainers_ = value;
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 allocatedContainers = 16;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocatedContainers() {
        bitField0_ = (bitField0_ & ~0x00008000);
        allocatedContainers_ = 0L;
        onChanged();
        return this;
      }

      private long pendingContainers_;
      /**
       * <code>optional int64 pendingContainers = 17;</code>
       * @return Whether the pendingContainers field is set.
       */
      @java.lang.Override
      public boolean hasPendingContainers() {
        return ((bitField0_ & 0x00010000) != 0);
      }
      /**
       * <code>optional int64 pendingContainers = 17;</code>
       * @return The pendingContainers.
       */
      @java.lang.Override
      public long getPendingContainers() {
        return pendingContainers_;
      }
      /**
       * <code>optional int64 pendingContainers = 17;</code>
       * @param value The pendingContainers to set.
       * @return This builder for chaining.
       */
      public Builder setPendingContainers(long value) {

        pendingContainers_ = value;
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 pendingContainers = 17;</code>
       * @return This builder for chaining.
       */
      public Builder clearPendingContainers() {
        bitField0_ = (bitField0_ & ~0x00010000);
        pendingContainers_ = 0L;
        onChanged();
        return this;
      }

      private long reservedContainers_;
      /**
       * <code>optional int64 reservedContainers = 18;</code>
       * @return Whether the reservedContainers field is set.
       */
      @java.lang.Override
      public boolean hasReservedContainers() {
        return ((bitField0_ & 0x00020000) != 0);
      }
      /**
       * <code>optional int64 reservedContainers = 18;</code>
       * @return The reservedContainers.
       */
      @java.lang.Override
      public long getReservedContainers() {
        return reservedContainers_;
      }
      /**
       * <code>optional int64 reservedContainers = 18;</code>
       * @param value The reservedContainers to set.
       * @return This builder for chaining.
       */
      public Builder setReservedContainers(long value) {

        reservedContainers_ = value;
        bitField0_ |= 0x00020000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 reservedContainers = 18;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservedContainers() {
        bitField0_ = (bitField0_ & ~0x00020000);
        reservedContainers_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueStatisticsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueStatisticsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueStatisticsProto>() {
      @java.lang.Override
      public QueueStatisticsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
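    // Commentary (added; not emitted by protoc): the public PARSER field is
    // deprecated in favor of the parser() accessor below. On malformed input,
    // parsePartialFrom attaches whatever was successfully read via
    // setUnfinishedMessage, so callers can still inspect the partial message.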

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueStatisticsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface QueueInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    boolean hasQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    java.lang.String getQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes();
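    // Commentary (added; not emitted by protoc): optional string fields
    // expose this triple of accessors (hazzer, String getter, raw ByteString
    // getter); the repeated message fields below instead expose list,
    // indexed, and count accessors plus OrBuilder views.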

    /**
     * <code>optional float capacity = 2;</code>
     * @return Whether the capacity field is set.
     */
    boolean hasCapacity();
    /**
     * <code>optional float capacity = 2;</code>
     * @return The capacity.
     */
    float getCapacity();

    /**
     * <code>optional float maximumCapacity = 3;</code>
     * @return Whether the maximumCapacity field is set.
     */
    boolean hasMaximumCapacity();
    /**
     * <code>optional float maximumCapacity = 3;</code>
     * @return The maximumCapacity.
     */
    float getMaximumCapacity();

    /**
     * <code>optional float currentCapacity = 4;</code>
     * @return Whether the currentCapacity field is set.
     */
    boolean hasCurrentCapacity();
    /**
     * <code>optional float currentCapacity = 4;</code>
     * @return The currentCapacity.
     */
    float getCurrentCapacity();

    /**
     * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
     * @return Whether the state field is set.
     */
    boolean hasState();
    /**
     * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
     * @return The state.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState();

    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> 
        getChildQueuesList();
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index);
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    int getChildQueuesCount();
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> 
        getChildQueuesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> 
        getApplicationsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    int getApplicationsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
        getApplicationsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
        int index);

    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @return A list containing the accessibleNodeLabels.
     */
    java.util.List<java.lang.String>
        getAccessibleNodeLabelsList();
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @return The count of accessibleNodeLabels.
     */
    int getAccessibleNodeLabelsCount();
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @param index The index of the element to return.
     * @return The accessibleNodeLabels at the given index.
     */
    java.lang.String getAccessibleNodeLabels(int index);
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @param index The index of the value to return.
     * @return The bytes of the accessibleNodeLabels at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAccessibleNodeLabelsBytes(int index);

    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return Whether the defaultNodeLabelExpression field is set.
     */
    boolean hasDefaultNodeLabelExpression();
    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return The defaultNodeLabelExpression.
     */
    java.lang.String getDefaultNodeLabelExpression();
    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return The bytes for defaultNodeLabelExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDefaultNodeLabelExpressionBytes();

    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     * @return Whether the queueStatistics field is set.
     */
    boolean hasQueueStatistics();
    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     * @return The queueStatistics.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics();
    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder();

    /**
     * <code>optional bool preemptionDisabled = 11;</code>
     * @return Whether the preemptionDisabled field is set.
     */
    boolean hasPreemptionDisabled();
    /**
     * <code>optional bool preemptionDisabled = 11;</code>
     * @return The preemptionDisabled.
     */
    boolean getPreemptionDisabled();

    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> 
        getQueueConfigurationsMapList();
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index);
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    int getQueueConfigurationsMapCount();
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> 
        getQueueConfigurationsMapOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder(
        int index);

    /**
     * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
     * @return Whether the intraQueuePreemptionDisabled field is set.
     */
    boolean hasIntraQueuePreemptionDisabled();
    /**
     * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
     * @return The intraQueuePreemptionDisabled.
     */
    boolean getIntraQueuePreemptionDisabled();

    /**
     * <code>optional float weight = 14;</code>
     * @return Whether the weight field is set.
     */
    boolean hasWeight();
    /**
     * <code>optional float weight = 14;</code>
     * @return The weight.
     */
    float getWeight();

    /**
     * <code>optional string queuePath = 15;</code>
     * @return Whether the queuePath field is set.
     */
    boolean hasQueuePath();
    /**
     * <code>optional string queuePath = 15;</code>
     * @return The queuePath.
     */
    java.lang.String getQueuePath();
    /**
     * <code>optional string queuePath = 15;</code>
     * @return The bytes for queuePath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuePathBytes();

    /**
     * <code>optional int32 maxParallelApps = 16;</code>
     * @return Whether the maxParallelApps field is set.
     */
    boolean hasMaxParallelApps();
    /**
     * <code>optional int32 maxParallelApps = 16;</code>
     * @return The maxParallelApps.
     */
    int getMaxParallelApps();

    /**
     * <code>optional string schedulerType = 17;</code>
     * @return Whether the schedulerType field is set.
     */
    boolean hasSchedulerType();
    /**
     * <code>optional string schedulerType = 17;</code>
     * @return The schedulerType.
     */
    java.lang.String getSchedulerType();
    /**
     * <code>optional string schedulerType = 17;</code>
     * @return The bytes for schedulerType.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchedulerTypeBytes();

    /**
     * <code>optional int32 minResourceVCore = 18;</code>
     * @return Whether the minResourceVCore field is set.
     */
    boolean hasMinResourceVCore();
    /**
     * <code>optional int32 minResourceVCore = 18;</code>
     * @return The minResourceVCore.
     */
    int getMinResourceVCore();

    /**
     * <code>optional int64 minResourceMemory = 19;</code>
     * @return Whether the minResourceMemory field is set.
     */
    boolean hasMinResourceMemory();
    /**
     * <code>optional int64 minResourceMemory = 19;</code>
     * @return The minResourceMemory.
     */
    long getMinResourceMemory();

    /**
     * <code>optional int32 maxResourceVCore = 20;</code>
     * @return Whether the maxResourceVCore field is set.
     */
    boolean hasMaxResourceVCore();
    /**
     * <code>optional int32 maxResourceVCore = 20;</code>
     * @return The maxResourceVCore.
     */
    int getMaxResourceVCore();

    /**
     * <code>optional int64 maxResourceMemory = 21;</code>
     * @return Whether the maxResourceMemory field is set.
     */
    boolean hasMaxResourceMemory();
    /**
     * <code>optional int64 maxResourceMemory = 21;</code>
     * @return The maxResourceMemory.
     */
    long getMaxResourceMemory();

    /**
     * <code>optional int32 reservedResourceVCore = 22;</code>
     * @return Whether the reservedResourceVCore field is set.
     */
    boolean hasReservedResourceVCore();
    /**
     * <code>optional int32 reservedResourceVCore = 22;</code>
     * @return The reservedResourceVCore.
     */
    int getReservedResourceVCore();

    /**
     * <code>optional int64 reservedResourceMemory = 23;</code>
     * @return Whether the reservedResourceMemory field is set.
     */
    boolean hasReservedResourceMemory();
    /**
     * <code>optional int64 reservedResourceMemory = 23;</code>
     * @return The reservedResourceMemory.
     */
    long getReservedResourceMemory();

    /**
     * <code>optional int32 steadyFairShareVCore = 24;</code>
     * @return Whether the steadyFairShareVCore field is set.
     */
    boolean hasSteadyFairShareVCore();
    /**
     * <code>optional int32 steadyFairShareVCore = 24;</code>
     * @return The steadyFairShareVCore.
     */
    int getSteadyFairShareVCore();

    /**
     * <code>optional int64 steadyFairShareMemory = 25;</code>
     * @return Whether the steadyFairShareMemory field is set.
     */
    boolean hasSteadyFairShareMemory();
    /**
     * <code>optional int64 steadyFairShareMemory = 25;</code>
     * @return The steadyFairShareMemory.
     */
    long getSteadyFairShareMemory();

    /**
     * <code>optional string subClusterId = 26;</code>
     * @return Whether the subClusterId field is set.
     */
    boolean hasSubClusterId();
    /**
     * <code>optional string subClusterId = 26;</code>
     * @return The subClusterId.
     */
    java.lang.String getSubClusterId();
    /**
     * <code>optional string subClusterId = 26;</code>
     * @return The bytes for subClusterId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes();

    /**
     * <code>optional int32 maxRunningApp = 27;</code>
     * @return Whether the maxRunningApp field is set.
     */
    boolean hasMaxRunningApp();
    /**
     * <code>optional int32 maxRunningApp = 27;</code>
     * @return The maxRunningApp.
     */
    int getMaxRunningApp();
  }
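
  // Illustrative usage sketch (not part of the generated API; method and
  // variable names here are hypothetical): QueueInfoProtoOrBuilder is
  // implemented both by QueueInfoProto itself and by its Builder (see below),
  // so read-only code can accept either form.
  //
  //   static void printQueue(
  //       org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder q) {
  //     if (q.hasQueueName()) {
  //       System.out.println(q.getQueueName()
  //           + " running apps: " + q.getApplicationsCount());
  //     }
  //   }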
  /**
   * Protobuf type {@code hadoop.yarn.QueueInfoProto}
   */
  public static final class QueueInfoProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueInfoProto)
      QueueInfoProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use QueueInfoProto.newBuilder() to construct.
    private QueueInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private QueueInfoProto() {
      queueName_ = "";
      state_ = 1;
      childQueues_ = java.util.Collections.emptyList();
      applications_ = java.util.Collections.emptyList();
      accessibleNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      defaultNodeLabelExpression_ = "";
      queueConfigurationsMap_ = java.util.Collections.emptyList();
      queuePath_ = "";
      schedulerType_ = "";
      subClusterId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new QueueInfoProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUENAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queueName_ = "";
    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    @java.lang.Override
    public boolean hasQueueName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    @java.lang.Override
    public java.lang.String getQueueName() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queueName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queueName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
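
    // Note on the accessor pair above: queueName_ holds either a String or a
    // ByteString. getQueueName() decodes a ByteString lazily and caches the
    // String form only when the bytes are valid UTF-8; getQueueNameBytes()
    // caches the UTF-8 encoding in the opposite direction. Every optional
    // string field in this message uses the same lazy-conversion pattern.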

    public static final int CAPACITY_FIELD_NUMBER = 2;
    private float capacity_ = 0F;
    /**
     * <code>optional float capacity = 2;</code>
     * @return Whether the capacity field is set.
     */
    @java.lang.Override
    public boolean hasCapacity() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional float capacity = 2;</code>
     * @return The capacity.
     */
    @java.lang.Override
    public float getCapacity() {
      return capacity_;
    }

    public static final int MAXIMUMCAPACITY_FIELD_NUMBER = 3;
    private float maximumCapacity_ = 0F;
    /**
     * <code>optional float maximumCapacity = 3;</code>
     * @return Whether the maximumCapacity field is set.
     */
    @java.lang.Override
    public boolean hasMaximumCapacity() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional float maximumCapacity = 3;</code>
     * @return The maximumCapacity.
     */
    @java.lang.Override
    public float getMaximumCapacity() {
      return maximumCapacity_;
    }

    public static final int CURRENTCAPACITY_FIELD_NUMBER = 4;
    private float currentCapacity_ = 0F;
    /**
     * <code>optional float currentCapacity = 4;</code>
     * @return Whether the currentCapacity field is set.
     */
    @java.lang.Override
    public boolean hasCurrentCapacity() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional float currentCapacity = 4;</code>
     * @return The currentCapacity.
     */
    @java.lang.Override
    public float getCurrentCapacity() {
      return currentCapacity_;
    }

    public static final int STATE_FIELD_NUMBER = 5;
    private int state_ = 1;
    /**
     * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
     * @return Whether the state field is set.
     */
    @java.lang.Override public boolean hasState() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
     * @return The state.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState() {
      org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(state_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.Q_STOPPED : result;
    }
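
    // getState() maps the raw wire number through forNumber(); a number with
    // no matching constant (for example, one written by a newer schema) falls
    // back to Q_STOPPED instead of surfacing as null, per protoc's convention
    // of defaulting to the enum's first declared value.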

    public static final int CHILDQUEUES_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> childQueues_;
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> getChildQueuesList() {
      return childQueues_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> 
        getChildQueuesOrBuilderList() {
      return childQueues_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    @java.lang.Override
    public int getChildQueuesCount() {
      return childQueues_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index) {
      return childQueues_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder(
        int index) {
      return childQueues_.get(index);
    }
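
    // The repeated-field accessors above return the backing list directly;
    // the Builder freezes it into an unmodifiable list in build(), so callers
    // must treat the result as read-only. The same holds for applications and
    // queueConfigurationsMap below.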

    public static final int APPLICATIONS_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> applications_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> getApplicationsList() {
      return applications_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
        getApplicationsOrBuilderList() {
      return applications_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    @java.lang.Override
    public int getApplicationsCount() {
      return applications_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) {
      return applications_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
        int index) {
      return applications_.get(index);
    }

    public static final int ACCESSIBLENODELABELS_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList accessibleNodeLabels_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @return A list containing the accessibleNodeLabels.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getAccessibleNodeLabelsList() {
      return accessibleNodeLabels_;
    }
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @return The count of accessibleNodeLabels.
     */
    public int getAccessibleNodeLabelsCount() {
      return accessibleNodeLabels_.size();
    }
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @param index The index of the element to return.
     * @return The accessibleNodeLabels at the given index.
     */
    public java.lang.String getAccessibleNodeLabels(int index) {
      return accessibleNodeLabels_.get(index);
    }
    /**
     * <code>repeated string accessibleNodeLabels = 8;</code>
     * @param index The index of the value to return.
     * @return The bytes of the accessibleNodeLabels at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAccessibleNodeLabelsBytes(int index) {
      return accessibleNodeLabels_.getByteString(index);
    }
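
    // accessibleNodeLabels_ is a LazyStringArrayList: elements stay in
    // ByteString form until first read as a String, which lets
    // getAccessibleNodeLabelsBytes(int) hand back raw bytes without
    // re-encoding. The ProtocolStringList returned by
    // getAccessibleNodeLabelsList() exposes both String and ByteString views.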

    public static final int DEFAULTNODELABELEXPRESSION_FIELD_NUMBER = 9;
    @SuppressWarnings("serial")
    private volatile java.lang.Object defaultNodeLabelExpression_ = "";
    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return Whether the defaultNodeLabelExpression field is set.
     */
    @java.lang.Override
    public boolean hasDefaultNodeLabelExpression() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return The defaultNodeLabelExpression.
     */
    @java.lang.Override
    public java.lang.String getDefaultNodeLabelExpression() {
      java.lang.Object ref = defaultNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          defaultNodeLabelExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string defaultNodeLabelExpression = 9;</code>
     * @return The bytes for defaultNodeLabelExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDefaultNodeLabelExpressionBytes() {
      java.lang.Object ref = defaultNodeLabelExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        defaultNodeLabelExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int QUEUESTATISTICS_FIELD_NUMBER = 10;
    private org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto queueStatistics_;
    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     * @return Whether the queueStatistics field is set.
     */
    @java.lang.Override
    public boolean hasQueueStatistics() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     * @return The queueStatistics.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics() {
      return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_;
    }
    /**
     * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder() {
      return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_;
    }
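
    // Presence of queueStatistics is tracked in bitField0_, while the getters
    // substitute getDefaultInstance() for a null reference, so callers never
    // observe null even when the field is unset.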

    public static final int PREEMPTIONDISABLED_FIELD_NUMBER = 11;
    private boolean preemptionDisabled_ = false;
    /**
     * <code>optional bool preemptionDisabled = 11;</code>
     * @return Whether the preemptionDisabled field is set.
     */
    @java.lang.Override
    public boolean hasPreemptionDisabled() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional bool preemptionDisabled = 11;</code>
     * @return The preemptionDisabled.
     */
    @java.lang.Override
    public boolean getPreemptionDisabled() {
      return preemptionDisabled_;
    }

    public static final int QUEUECONFIGURATIONSMAP_FIELD_NUMBER = 12;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> queueConfigurationsMap_;
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> getQueueConfigurationsMapList() {
      return queueConfigurationsMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> 
        getQueueConfigurationsMapOrBuilderList() {
      return queueConfigurationsMap_;
    }
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    @java.lang.Override
    public int getQueueConfigurationsMapCount() {
      return queueConfigurationsMap_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index) {
      return queueConfigurationsMap_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder(
        int index) {
      return queueConfigurationsMap_.get(index);
    }

    public static final int INTRAQUEUEPREEMPTIONDISABLED_FIELD_NUMBER = 13;
    private boolean intraQueuePreemptionDisabled_ = false;
    /**
     * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
     * @return Whether the intraQueuePreemptionDisabled field is set.
     */
    @java.lang.Override
    public boolean hasIntraQueuePreemptionDisabled() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
     * @return The intraQueuePreemptionDisabled.
     */
    @java.lang.Override
    public boolean getIntraQueuePreemptionDisabled() {
      return intraQueuePreemptionDisabled_;
    }

    public static final int WEIGHT_FIELD_NUMBER = 14;
    private float weight_ = 0F;
    /**
     * <code>optional float weight = 14;</code>
     * @return Whether the weight field is set.
     */
    @java.lang.Override
    public boolean hasWeight() {
      return ((bitField0_ & 0x00000200) != 0);
    }
    /**
     * <code>optional float weight = 14;</code>
     * @return The weight.
     */
    @java.lang.Override
    public float getWeight() {
      return weight_;
    }

    public static final int QUEUEPATH_FIELD_NUMBER = 15;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queuePath_ = "";
    /**
     * <code>optional string queuePath = 15;</code>
     * @return Whether the queuePath field is set.
     */
    @java.lang.Override
    public boolean hasQueuePath() {
      return ((bitField0_ & 0x00000400) != 0);
    }
    /**
     * <code>optional string queuePath = 15;</code>
     * @return The queuePath.
     */
    @java.lang.Override
    public java.lang.String getQueuePath() {
      java.lang.Object ref = queuePath_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queuePath_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queuePath = 15;</code>
     * @return The bytes for queuePath.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueuePathBytes() {
      java.lang.Object ref = queuePath_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queuePath_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int MAXPARALLELAPPS_FIELD_NUMBER = 16;
    private int maxParallelApps_ = 0;
    /**
     * <code>optional int32 maxParallelApps = 16;</code>
     * @return Whether the maxParallelApps field is set.
     */
    @java.lang.Override
    public boolean hasMaxParallelApps() {
      return ((bitField0_ & 0x00000800) != 0);
    }
    /**
     * <code>optional int32 maxParallelApps = 16;</code>
     * @return The maxParallelApps.
     */
    @java.lang.Override
    public int getMaxParallelApps() {
      return maxParallelApps_;
    }

    public static final int SCHEDULERTYPE_FIELD_NUMBER = 17;
    @SuppressWarnings("serial")
    private volatile java.lang.Object schedulerType_ = "";
    /**
     * <code>optional string schedulerType = 17;</code>
     * @return Whether the schedulerType field is set.
     */
    @java.lang.Override
    public boolean hasSchedulerType() {
      return ((bitField0_ & 0x00001000) != 0);
    }
    /**
     * <code>optional string schedulerType = 17;</code>
     * @return The schedulerType.
     */
    @java.lang.Override
    public java.lang.String getSchedulerType() {
      java.lang.Object ref = schedulerType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          schedulerType_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string schedulerType = 17;</code>
     * @return The bytes for schedulerType.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSchedulerTypeBytes() {
      java.lang.Object ref = schedulerType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        schedulerType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int MINRESOURCEVCORE_FIELD_NUMBER = 18;
    private int minResourceVCore_ = 0;
    /**
     * <code>optional int32 minResourceVCore = 18;</code>
     * @return Whether the minResourceVCore field is set.
     */
    @java.lang.Override
    public boolean hasMinResourceVCore() {
      return ((bitField0_ & 0x00002000) != 0);
    }
    /**
     * <code>optional int32 minResourceVCore = 18;</code>
     * @return The minResourceVCore.
     */
    @java.lang.Override
    public int getMinResourceVCore() {
      return minResourceVCore_;
    }

    public static final int MINRESOURCEMEMORY_FIELD_NUMBER = 19;
    private long minResourceMemory_ = 0L;
    /**
     * <code>optional int64 minResourceMemory = 19;</code>
     * @return Whether the minResourceMemory field is set.
     */
    @java.lang.Override
    public boolean hasMinResourceMemory() {
      return ((bitField0_ & 0x00004000) != 0);
    }
    /**
     * <code>optional int64 minResourceMemory = 19;</code>
     * @return The minResourceMemory.
     */
    @java.lang.Override
    public long getMinResourceMemory() {
      return minResourceMemory_;
    }

    public static final int MAXRESOURCEVCORE_FIELD_NUMBER = 20;
    private int maxResourceVCore_ = 0;
    /**
     * <code>optional int32 maxResourceVCore = 20;</code>
     * @return Whether the maxResourceVCore field is set.
     */
    @java.lang.Override
    public boolean hasMaxResourceVCore() {
      return ((bitField0_ & 0x00008000) != 0);
    }
    /**
     * <code>optional int32 maxResourceVCore = 20;</code>
     * @return The maxResourceVCore.
     */
    @java.lang.Override
    public int getMaxResourceVCore() {
      return maxResourceVCore_;
    }

    public static final int MAXRESOURCEMEMORY_FIELD_NUMBER = 21;
    private long maxResourceMemory_ = 0L;
    /**
     * <code>optional int64 maxResourceMemory = 21;</code>
     * @return Whether the maxResourceMemory field is set.
     */
    @java.lang.Override
    public boolean hasMaxResourceMemory() {
      return ((bitField0_ & 0x00010000) != 0);
    }
    /**
     * <code>optional int64 maxResourceMemory = 21;</code>
     * @return The maxResourceMemory.
     */
    @java.lang.Override
    public long getMaxResourceMemory() {
      return maxResourceMemory_;
    }

    public static final int RESERVEDRESOURCEVCORE_FIELD_NUMBER = 22;
    private int reservedResourceVCore_ = 0;
    /**
     * <code>optional int32 reservedResourceVCore = 22;</code>
     * @return Whether the reservedResourceVCore field is set.
     */
    @java.lang.Override
    public boolean hasReservedResourceVCore() {
      return ((bitField0_ & 0x00020000) != 0);
    }
    /**
     * <code>optional int32 reservedResourceVCore = 22;</code>
     * @return The reservedResourceVCore.
     */
    @java.lang.Override
    public int getReservedResourceVCore() {
      return reservedResourceVCore_;
    }

    public static final int RESERVEDRESOURCEMEMORY_FIELD_NUMBER = 23;
    private long reservedResourceMemory_ = 0L;
    /**
     * <code>optional int64 reservedResourceMemory = 23;</code>
     * @return Whether the reservedResourceMemory field is set.
     */
    @java.lang.Override
    public boolean hasReservedResourceMemory() {
      return ((bitField0_ & 0x00040000) != 0);
    }
    /**
     * <code>optional int64 reservedResourceMemory = 23;</code>
     * @return The reservedResourceMemory.
     */
    @java.lang.Override
    public long getReservedResourceMemory() {
      return reservedResourceMemory_;
    }

    public static final int STEADYFAIRSHAREVCORE_FIELD_NUMBER = 24;
    private int steadyFairShareVCore_ = 0;
    /**
     * <code>optional int32 steadyFairShareVCore = 24;</code>
     * @return Whether the steadyFairShareVCore field is set.
     */
    @java.lang.Override
    public boolean hasSteadyFairShareVCore() {
      return ((bitField0_ & 0x00080000) != 0);
    }
    /**
     * <code>optional int32 steadyFairShareVCore = 24;</code>
     * @return The steadyFairShareVCore.
     */
    @java.lang.Override
    public int getSteadyFairShareVCore() {
      return steadyFairShareVCore_;
    }

    public static final int STEADYFAIRSHAREMEMORY_FIELD_NUMBER = 25;
    private long steadyFairShareMemory_ = 0L;
    /**
     * <code>optional int64 steadyFairShareMemory = 25;</code>
     * @return Whether the steadyFairShareMemory field is set.
     */
    @java.lang.Override
    public boolean hasSteadyFairShareMemory() {
      return ((bitField0_ & 0x00100000) != 0);
    }
    /**
     * <code>optional int64 steadyFairShareMemory = 25;</code>
     * @return The steadyFairShareMemory.
     */
    @java.lang.Override
    public long getSteadyFairShareMemory() {
      return steadyFairShareMemory_;
    }

    public static final int SUBCLUSTERID_FIELD_NUMBER = 26;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subClusterId_ = "";
    /**
     * <code>optional string subClusterId = 26;</code>
     * @return Whether the subClusterId field is set.
     */
    @java.lang.Override
    public boolean hasSubClusterId() {
      return ((bitField0_ & 0x00200000) != 0);
    }
    /**
     * <code>optional string subClusterId = 26;</code>
     * @return The subClusterId.
     */
    @java.lang.Override
    public java.lang.String getSubClusterId() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subClusterId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subClusterId = 26;</code>
     * @return The bytes for subClusterId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubClusterIdBytes() {
      java.lang.Object ref = subClusterId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subClusterId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int MAXRUNNINGAPP_FIELD_NUMBER = 27;
    private int maxRunningApp_ = 0;
    /**
     * <code>optional int32 maxRunningApp = 27;</code>
     * @return Whether the maxRunningApp field is set.
     */
    @java.lang.Override
    public boolean hasMaxRunningApp() {
      return ((bitField0_ & 0x00400000) != 0);
    }
    /**
     * <code>optional int32 maxRunningApp = 27;</code>
     * @return The maxRunningApp.
     */
    @java.lang.Override
    public int getMaxRunningApp() {
      return maxRunningApp_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getChildQueuesCount(); i++) {
        if (!getChildQueues(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getApplicationsCount(); i++) {
        if (!getApplications(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getQueueConfigurationsMapCount(); i++) {
        if (!getQueueConfigurationsMap(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
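
    // memoizedIsInitialized caches the answer: -1 means not yet computed,
    // 0 false, 1 true. Only the repeated message fields are walked here;
    // queueStatistics is skipped because protoc omits the check for message
    // types that contain no required fields (transitively), so that field can
    // never be uninitialized.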

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeFloat(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeFloat(3, maximumCapacity_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeFloat(4, currentCapacity_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeEnum(5, state_);
      }
      for (int i = 0; i < childQueues_.size(); i++) {
        output.writeMessage(6, childQueues_.get(i));
      }
      for (int i = 0; i < applications_.size(); i++) {
        output.writeMessage(7, applications_.get(i));
      }
      for (int i = 0; i < accessibleNodeLabels_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, accessibleNodeLabels_.getRaw(i));
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 9, defaultNodeLabelExpression_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(10, getQueueStatistics());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeBool(11, preemptionDisabled_);
      }
      for (int i = 0; i < queueConfigurationsMap_.size(); i++) {
        output.writeMessage(12, queueConfigurationsMap_.get(i));
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeBool(13, intraQueuePreemptionDisabled_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        output.writeFloat(14, weight_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 15, queuePath_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        output.writeInt32(16, maxParallelApps_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 17, schedulerType_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        output.writeInt32(18, minResourceVCore_);
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        output.writeInt64(19, minResourceMemory_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        output.writeInt32(20, maxResourceVCore_);
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        output.writeInt64(21, maxResourceMemory_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        output.writeInt32(22, reservedResourceVCore_);
      }
      if (((bitField0_ & 0x00040000) != 0)) {
        output.writeInt64(23, reservedResourceMemory_);
      }
      if (((bitField0_ & 0x00080000) != 0)) {
        output.writeInt32(24, steadyFairShareVCore_);
      }
      if (((bitField0_ & 0x00100000) != 0)) {
        output.writeInt64(25, steadyFairShareMemory_);
      }
      if (((bitField0_ & 0x00200000) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 26, subClusterId_);
      }
      if (((bitField0_ & 0x00400000) != 0)) {
        output.writeInt32(27, maxRunningApp_);
      }
      getUnknownFields().writeTo(output);
    }
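
    // writeTo emits set fields in ascending field-number order and finishes
    // by preserving any unknown fields retained from parsing. Illustrative
    // usage (hypothetical names; toByteArray() and the OutputStream overload
    // are inherited helpers that ultimately delegate to this method):
    //
    //   byte[] bytes = queueInfo.toByteArray();
    //   queueInfo.writeTo(outputStream);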

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(2, capacity_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(3, maximumCapacity_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(4, currentCapacity_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, state_);
      }
      for (int i = 0; i < childQueues_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, childQueues_.get(i));
      }
      for (int i = 0; i < applications_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, applications_.get(i));
      }
      {
        int dataSize = 0;
        for (int i = 0; i < accessibleNodeLabels_.size(); i++) {
          dataSize += computeStringSizeNoTag(accessibleNodeLabels_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getAccessibleNodeLabelsList().size();
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(9, defaultNodeLabelExpression_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(10, getQueueStatistics());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(11, preemptionDisabled_);
      }
      for (int i = 0; i < queueConfigurationsMap_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(12, queueConfigurationsMap_.get(i));
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(13, intraQueuePreemptionDisabled_);
      }
      if (((bitField0_ & 0x00000200) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(14, weight_);
      }
      if (((bitField0_ & 0x00000400) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(15, queuePath_);
      }
      if (((bitField0_ & 0x00000800) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(16, maxParallelApps_);
      }
      if (((bitField0_ & 0x00001000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(17, schedulerType_);
      }
      if (((bitField0_ & 0x00002000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(18, minResourceVCore_);
      }
      if (((bitField0_ & 0x00004000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(19, minResourceMemory_);
      }
      if (((bitField0_ & 0x00008000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(20, maxResourceVCore_);
      }
      if (((bitField0_ & 0x00010000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(21, maxResourceMemory_);
      }
      if (((bitField0_ & 0x00020000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(22, reservedResourceVCore_);
      }
      if (((bitField0_ & 0x00040000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(23, reservedResourceMemory_);
      }
      if (((bitField0_ & 0x00080000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(24, steadyFairShareVCore_);
      }
      if (((bitField0_ & 0x00100000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(25, steadyFairShareMemory_);
      }
      if (((bitField0_ & 0x00200000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(26, subClusterId_);
      }
      if (((bitField0_ & 0x00400000) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(27, maxRunningApp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
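
    // getSerializedSize memoizes its result in memoizedSize (-1 means not yet
    // computed); writeMessage(...) relies on these cached sizes when emitting
    // length-delimited submessages. For accessibleNodeLabels the per-element
    // overhead is exactly 1 byte: the tag for field 8 with wire type 2
    // encodes as the single byte (8 << 3) | 2.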

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto) obj;

      if (hasQueueName() != other.hasQueueName()) return false;
      if (hasQueueName()) {
        if (!getQueueName()
            .equals(other.getQueueName())) return false;
      }
      if (hasCapacity() != other.hasCapacity()) return false;
      if (hasCapacity()) {
        if (java.lang.Float.floatToIntBits(getCapacity())
            != java.lang.Float.floatToIntBits(
                other.getCapacity())) return false;
      }
      if (hasMaximumCapacity() != other.hasMaximumCapacity()) return false;
      if (hasMaximumCapacity()) {
        if (java.lang.Float.floatToIntBits(getMaximumCapacity())
            != java.lang.Float.floatToIntBits(
                other.getMaximumCapacity())) return false;
      }
      if (hasCurrentCapacity() != other.hasCurrentCapacity()) return false;
      if (hasCurrentCapacity()) {
        if (java.lang.Float.floatToIntBits(getCurrentCapacity())
            != java.lang.Float.floatToIntBits(
                other.getCurrentCapacity())) return false;
      }
      if (hasState() != other.hasState()) return false;
      if (hasState()) {
        if (state_ != other.state_) return false;
      }
      if (!getChildQueuesList()
          .equals(other.getChildQueuesList())) return false;
      if (!getApplicationsList()
          .equals(other.getApplicationsList())) return false;
      if (!getAccessibleNodeLabelsList()
          .equals(other.getAccessibleNodeLabelsList())) return false;
      if (hasDefaultNodeLabelExpression() != other.hasDefaultNodeLabelExpression()) return false;
      if (hasDefaultNodeLabelExpression()) {
        if (!getDefaultNodeLabelExpression()
            .equals(other.getDefaultNodeLabelExpression())) return false;
      }
      if (hasQueueStatistics() != other.hasQueueStatistics()) return false;
      if (hasQueueStatistics()) {
        if (!getQueueStatistics()
            .equals(other.getQueueStatistics())) return false;
      }
      if (hasPreemptionDisabled() != other.hasPreemptionDisabled()) return false;
      if (hasPreemptionDisabled()) {
        if (getPreemptionDisabled()
            != other.getPreemptionDisabled()) return false;
      }
      if (!getQueueConfigurationsMapList()
          .equals(other.getQueueConfigurationsMapList())) return false;
      if (hasIntraQueuePreemptionDisabled() != other.hasIntraQueuePreemptionDisabled()) return false;
      if (hasIntraQueuePreemptionDisabled()) {
        if (getIntraQueuePreemptionDisabled()
            != other.getIntraQueuePreemptionDisabled()) return false;
      }
      if (hasWeight() != other.hasWeight()) return false;
      if (hasWeight()) {
        if (java.lang.Float.floatToIntBits(getWeight())
            != java.lang.Float.floatToIntBits(
                other.getWeight())) return false;
      }
      if (hasQueuePath() != other.hasQueuePath()) return false;
      if (hasQueuePath()) {
        if (!getQueuePath()
            .equals(other.getQueuePath())) return false;
      }
      if (hasMaxParallelApps() != other.hasMaxParallelApps()) return false;
      if (hasMaxParallelApps()) {
        if (getMaxParallelApps()
            != other.getMaxParallelApps()) return false;
      }
      if (hasSchedulerType() != other.hasSchedulerType()) return false;
      if (hasSchedulerType()) {
        if (!getSchedulerType()
            .equals(other.getSchedulerType())) return false;
      }
      if (hasMinResourceVCore() != other.hasMinResourceVCore()) return false;
      if (hasMinResourceVCore()) {
        if (getMinResourceVCore()
            != other.getMinResourceVCore()) return false;
      }
      if (hasMinResourceMemory() != other.hasMinResourceMemory()) return false;
      if (hasMinResourceMemory()) {
        if (getMinResourceMemory()
            != other.getMinResourceMemory()) return false;
      }
      if (hasMaxResourceVCore() != other.hasMaxResourceVCore()) return false;
      if (hasMaxResourceVCore()) {
        if (getMaxResourceVCore()
            != other.getMaxResourceVCore()) return false;
      }
      if (hasMaxResourceMemory() != other.hasMaxResourceMemory()) return false;
      if (hasMaxResourceMemory()) {
        if (getMaxResourceMemory()
            != other.getMaxResourceMemory()) return false;
      }
      if (hasReservedResourceVCore() != other.hasReservedResourceVCore()) return false;
      if (hasReservedResourceVCore()) {
        if (getReservedResourceVCore()
            != other.getReservedResourceVCore()) return false;
      }
      if (hasReservedResourceMemory() != other.hasReservedResourceMemory()) return false;
      if (hasReservedResourceMemory()) {
        if (getReservedResourceMemory()
            != other.getReservedResourceMemory()) return false;
      }
      if (hasSteadyFairShareVCore() != other.hasSteadyFairShareVCore()) return false;
      if (hasSteadyFairShareVCore()) {
        if (getSteadyFairShareVCore()
            != other.getSteadyFairShareVCore()) return false;
      }
      if (hasSteadyFairShareMemory() != other.hasSteadyFairShareMemory()) return false;
      if (hasSteadyFairShareMemory()) {
        if (getSteadyFairShareMemory()
            != other.getSteadyFairShareMemory()) return false;
      }
      if (hasSubClusterId() != other.hasSubClusterId()) return false;
      if (hasSubClusterId()) {
        if (!getSubClusterId()
            .equals(other.getSubClusterId())) return false;
      }
      if (hasMaxRunningApp() != other.hasMaxRunningApp()) return false;
      if (hasMaxRunningApp()) {
        if (getMaxRunningApp()
            != other.getMaxRunningApp()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
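
    // equals() compares float fields via Float.floatToIntBits, so NaN
    // compares equal to NaN and +0.0f differs from -0.0f, matching the
    // Float.equals contract rather than the == operator. The enum state is
    // compared by its raw number, which also covers values not recognized by
    // this schema version.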

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueueName()) {
        hash = (37 * hash) + QUEUENAME_FIELD_NUMBER;
        hash = (53 * hash) + getQueueName().hashCode();
      }
      if (hasCapacity()) {
        hash = (37 * hash) + CAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getCapacity());
      }
      if (hasMaximumCapacity()) {
        hash = (37 * hash) + MAXIMUMCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getMaximumCapacity());
      }
      if (hasCurrentCapacity()) {
        hash = (37 * hash) + CURRENTCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getCurrentCapacity());
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + state_;
      }
      if (getChildQueuesCount() > 0) {
        hash = (37 * hash) + CHILDQUEUES_FIELD_NUMBER;
        hash = (53 * hash) + getChildQueuesList().hashCode();
      }
      if (getApplicationsCount() > 0) {
        hash = (37 * hash) + APPLICATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationsList().hashCode();
      }
      if (getAccessibleNodeLabelsCount() > 0) {
        hash = (37 * hash) + ACCESSIBLENODELABELS_FIELD_NUMBER;
        hash = (53 * hash) + getAccessibleNodeLabelsList().hashCode();
      }
      if (hasDefaultNodeLabelExpression()) {
        hash = (37 * hash) + DEFAULTNODELABELEXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getDefaultNodeLabelExpression().hashCode();
      }
      if (hasQueueStatistics()) {
        hash = (37 * hash) + QUEUESTATISTICS_FIELD_NUMBER;
        hash = (53 * hash) + getQueueStatistics().hashCode();
      }
      if (hasPreemptionDisabled()) {
        hash = (37 * hash) + PREEMPTIONDISABLED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getPreemptionDisabled());
      }
      if (getQueueConfigurationsMapCount() > 0) {
        hash = (37 * hash) + QUEUECONFIGURATIONSMAP_FIELD_NUMBER;
        hash = (53 * hash) + getQueueConfigurationsMapList().hashCode();
      }
      if (hasIntraQueuePreemptionDisabled()) {
        hash = (37 * hash) + INTRAQUEUEPREEMPTIONDISABLED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getIntraQueuePreemptionDisabled());
      }
      if (hasWeight()) {
        hash = (37 * hash) + WEIGHT_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getWeight());
      }
      if (hasQueuePath()) {
        hash = (37 * hash) + QUEUEPATH_FIELD_NUMBER;
        hash = (53 * hash) + getQueuePath().hashCode();
      }
      if (hasMaxParallelApps()) {
        hash = (37 * hash) + MAXPARALLELAPPS_FIELD_NUMBER;
        hash = (53 * hash) + getMaxParallelApps();
      }
      if (hasSchedulerType()) {
        hash = (37 * hash) + SCHEDULERTYPE_FIELD_NUMBER;
        hash = (53 * hash) + getSchedulerType().hashCode();
      }
      if (hasMinResourceVCore()) {
        hash = (37 * hash) + MINRESOURCEVCORE_FIELD_NUMBER;
        hash = (53 * hash) + getMinResourceVCore();
      }
      if (hasMinResourceMemory()) {
        hash = (37 * hash) + MINRESOURCEMEMORY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getMinResourceMemory());
      }
      if (hasMaxResourceVCore()) {
        hash = (37 * hash) + MAXRESOURCEVCORE_FIELD_NUMBER;
        hash = (53 * hash) + getMaxResourceVCore();
      }
      if (hasMaxResourceMemory()) {
        hash = (37 * hash) + MAXRESOURCEMEMORY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getMaxResourceMemory());
      }
      if (hasReservedResourceVCore()) {
        hash = (37 * hash) + RESERVEDRESOURCEVCORE_FIELD_NUMBER;
        hash = (53 * hash) + getReservedResourceVCore();
      }
      if (hasReservedResourceMemory()) {
        hash = (37 * hash) + RESERVEDRESOURCEMEMORY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getReservedResourceMemory());
      }
      if (hasSteadyFairShareVCore()) {
        hash = (37 * hash) + STEADYFAIRSHAREVCORE_FIELD_NUMBER;
        hash = (53 * hash) + getSteadyFairShareVCore();
      }
      if (hasSteadyFairShareMemory()) {
        hash = (37 * hash) + STEADYFAIRSHAREMEMORY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSteadyFairShareMemory());
      }
      if (hasSubClusterId()) {
        hash = (37 * hash) + SUBCLUSTERID_FIELD_NUMBER;
        hash = (53 * hash) + getSubClusterId().hashCode();
      }
      if (hasMaxRunningApp()) {
        hash = (37 * hash) + MAXRUNNINGAPP_FIELD_NUMBER;
        hash = (53 * hash) + getMaxRunningApp();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
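
    // hashCode() mixes each set field's number and value, seeded with 41 and
    // the descriptor hash, and memoizes the result with 0 as the unset
    // sentinel. If a computed hash ever happened to be exactly 0 it would
    // simply be recomputed on each call: still correct, just not cached.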

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
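    // Builder factories: newBuilder() starts from the default instance,
    // newBuilder(prototype) pre-populates a fresh builder by merging an
    // existing message, and toBuilder() round-trips this message back into a
    // mutable builder.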

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
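    // A minimal build/serialize/parse sketch (illustrative only; the field
    // values here are hypothetical):
    //
    //   QueueInfoProto queue = QueueInfoProto.newBuilder()
    //       .setQueueName("default")
    //       .setCapacity(0.5f)
    //       .setState(QueueStateProto.Q_STOPPED)
    //       .build();
    //   byte[] wire = queue.toByteArray();
    //   QueueInfoProto parsed = QueueInfoProto.parseFrom(wire);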

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueInfoProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getChildQueuesFieldBuilder();
          getApplicationsFieldBuilder();
          getQueueStatisticsFieldBuilder();
          getQueueConfigurationsMapFieldBuilder();
        }
      }
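      // alwaysUseFieldBuilders is a protobuf-internal flag (normally false,
      // enabled by test harnesses) that forces the nested field builders to
      // be created eagerly instead of on first use.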
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queueName_ = "";
        capacity_ = 0F;
        maximumCapacity_ = 0F;
        currentCapacity_ = 0F;
        state_ = 1;
        if (childQueuesBuilder_ == null) {
          childQueues_ = java.util.Collections.emptyList();
        } else {
          childQueues_ = null;
          childQueuesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        if (applicationsBuilder_ == null) {
          applications_ = java.util.Collections.emptyList();
        } else {
          applications_ = null;
          applicationsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        accessibleNodeLabels_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        defaultNodeLabelExpression_ = "";
        queueStatistics_ = null;
        if (queueStatisticsBuilder_ != null) {
          queueStatisticsBuilder_.dispose();
          queueStatisticsBuilder_ = null;
        }
        preemptionDisabled_ = false;
        if (queueConfigurationsMapBuilder_ == null) {
          queueConfigurationsMap_ = java.util.Collections.emptyList();
        } else {
          queueConfigurationsMap_ = null;
          queueConfigurationsMapBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000800);
        intraQueuePreemptionDisabled_ = false;
        weight_ = 0F;
        queuePath_ = "";
        maxParallelApps_ = 0;
        schedulerType_ = "";
        minResourceVCore_ = 0;
        minResourceMemory_ = 0L;
        maxResourceVCore_ = 0;
        maxResourceMemory_ = 0L;
        reservedResourceVCore_ = 0;
        reservedResourceMemory_ = 0L;
        steadyFairShareVCore_ = 0;
        steadyFairShareMemory_ = 0L;
        subClusterId_ = "";
        maxRunningApp_ = 0;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueInfoProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result) {
        if (childQueuesBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            childQueues_ = java.util.Collections.unmodifiableList(childQueues_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.childQueues_ = childQueues_;
        } else {
          result.childQueues_ = childQueuesBuilder_.build();
        }
        if (applicationsBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0)) {
            applications_ = java.util.Collections.unmodifiableList(applications_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.applications_ = applications_;
        } else {
          result.applications_ = applicationsBuilder_.build();
        }
        if (queueConfigurationsMapBuilder_ == null) {
          if (((bitField0_ & 0x00000800) != 0)) {
            queueConfigurationsMap_ = java.util.Collections.unmodifiableList(queueConfigurationsMap_);
            bitField0_ = (bitField0_ & ~0x00000800);
          }
          result.queueConfigurationsMap_ = queueConfigurationsMap_;
        } else {
          result.queueConfigurationsMap_ = queueConfigurationsMapBuilder_.build();
        }
      }
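      // Note: the builder and the built message use different bit layouts in
      // bitField0_. Bits 0x20, 0x40 and 0x800 track list mutability for the
      // repeated fields (childQueues, applications, queueConfigurationsMap)
      // in the builder only, so buildPartial0 remaps every remaining has-bit
      // to its compacted position in the message.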

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queueName_ = queueName_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.capacity_ = capacity_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.maximumCapacity_ = maximumCapacity_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.currentCapacity_ = currentCapacity_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.state_ = state_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          accessibleNodeLabels_.makeImmutable();
          result.accessibleNodeLabels_ = accessibleNodeLabels_;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.defaultNodeLabelExpression_ = defaultNodeLabelExpression_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000200) != 0)) {
          result.queueStatistics_ = queueStatisticsBuilder_ == null
              ? queueStatistics_
              : queueStatisticsBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000400) != 0)) {
          result.preemptionDisabled_ = preemptionDisabled_;
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00001000) != 0)) {
          result.intraQueuePreemptionDisabled_ = intraQueuePreemptionDisabled_;
          to_bitField0_ |= 0x00000100;
        }
        if (((from_bitField0_ & 0x00002000) != 0)) {
          result.weight_ = weight_;
          to_bitField0_ |= 0x00000200;
        }
        if (((from_bitField0_ & 0x00004000) != 0)) {
          result.queuePath_ = queuePath_;
          to_bitField0_ |= 0x00000400;
        }
        if (((from_bitField0_ & 0x00008000) != 0)) {
          result.maxParallelApps_ = maxParallelApps_;
          to_bitField0_ |= 0x00000800;
        }
        if (((from_bitField0_ & 0x00010000) != 0)) {
          result.schedulerType_ = schedulerType_;
          to_bitField0_ |= 0x00001000;
        }
        if (((from_bitField0_ & 0x00020000) != 0)) {
          result.minResourceVCore_ = minResourceVCore_;
          to_bitField0_ |= 0x00002000;
        }
        if (((from_bitField0_ & 0x00040000) != 0)) {
          result.minResourceMemory_ = minResourceMemory_;
          to_bitField0_ |= 0x00004000;
        }
        if (((from_bitField0_ & 0x00080000) != 0)) {
          result.maxResourceVCore_ = maxResourceVCore_;
          to_bitField0_ |= 0x00008000;
        }
        if (((from_bitField0_ & 0x00100000) != 0)) {
          result.maxResourceMemory_ = maxResourceMemory_;
          to_bitField0_ |= 0x00010000;
        }
        if (((from_bitField0_ & 0x00200000) != 0)) {
          result.reservedResourceVCore_ = reservedResourceVCore_;
          to_bitField0_ |= 0x00020000;
        }
        if (((from_bitField0_ & 0x00400000) != 0)) {
          result.reservedResourceMemory_ = reservedResourceMemory_;
          to_bitField0_ |= 0x00040000;
        }
        if (((from_bitField0_ & 0x00800000) != 0)) {
          result.steadyFairShareVCore_ = steadyFairShareVCore_;
          to_bitField0_ |= 0x00080000;
        }
        if (((from_bitField0_ & 0x01000000) != 0)) {
          result.steadyFairShareMemory_ = steadyFairShareMemory_;
          to_bitField0_ |= 0x00100000;
        }
        if (((from_bitField0_ & 0x02000000) != 0)) {
          result.subClusterId_ = subClusterId_;
          to_bitField0_ |= 0x00200000;
        }
        if (((from_bitField0_ & 0x04000000) != 0)) {
          result.maxRunningApp_ = maxRunningApp_;
          to_bitField0_ |= 0x00400000;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
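      // Field-by-field merge: singular fields that are set on 'other'
      // overwrite the current values, while the repeated fields and
      // accessibleNodeLabels are concatenated, per standard protobuf merge
      // semantics.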

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance()) return this;
        if (other.hasQueueName()) {
          queueName_ = other.queueName_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasCapacity()) {
          setCapacity(other.getCapacity());
        }
        if (other.hasMaximumCapacity()) {
          setMaximumCapacity(other.getMaximumCapacity());
        }
        if (other.hasCurrentCapacity()) {
          setCurrentCapacity(other.getCurrentCapacity());
        }
        if (other.hasState()) {
          setState(other.getState());
        }
        if (childQueuesBuilder_ == null) {
          if (!other.childQueues_.isEmpty()) {
            if (childQueues_.isEmpty()) {
              childQueues_ = other.childQueues_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureChildQueuesIsMutable();
              childQueues_.addAll(other.childQueues_);
            }
            onChanged();
          }
        } else {
          if (!other.childQueues_.isEmpty()) {
            if (childQueuesBuilder_.isEmpty()) {
              childQueuesBuilder_.dispose();
              childQueuesBuilder_ = null;
              childQueues_ = other.childQueues_;
              bitField0_ = (bitField0_ & ~0x00000020);
              childQueuesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getChildQueuesFieldBuilder() : null;
            } else {
              childQueuesBuilder_.addAllMessages(other.childQueues_);
            }
          }
        }
        if (applicationsBuilder_ == null) {
          if (!other.applications_.isEmpty()) {
            if (applications_.isEmpty()) {
              applications_ = other.applications_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureApplicationsIsMutable();
              applications_.addAll(other.applications_);
            }
            onChanged();
          }
        } else {
          if (!other.applications_.isEmpty()) {
            if (applicationsBuilder_.isEmpty()) {
              applicationsBuilder_.dispose();
              applicationsBuilder_ = null;
              applications_ = other.applications_;
              bitField0_ = (bitField0_ & ~0x00000040);
              applicationsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationsFieldBuilder() : null;
            } else {
              applicationsBuilder_.addAllMessages(other.applications_);
            }
          }
        }
        if (!other.accessibleNodeLabels_.isEmpty()) {
          if (accessibleNodeLabels_.isEmpty()) {
            accessibleNodeLabels_ = other.accessibleNodeLabels_;
            bitField0_ |= 0x00000080;
          } else {
            ensureAccessibleNodeLabelsIsMutable();
            accessibleNodeLabels_.addAll(other.accessibleNodeLabels_);
          }
          onChanged();
        }
        if (other.hasDefaultNodeLabelExpression()) {
          defaultNodeLabelExpression_ = other.defaultNodeLabelExpression_;
          bitField0_ |= 0x00000100;
          onChanged();
        }
        if (other.hasQueueStatistics()) {
          mergeQueueStatistics(other.getQueueStatistics());
        }
        if (other.hasPreemptionDisabled()) {
          setPreemptionDisabled(other.getPreemptionDisabled());
        }
        if (queueConfigurationsMapBuilder_ == null) {
          if (!other.queueConfigurationsMap_.isEmpty()) {
            if (queueConfigurationsMap_.isEmpty()) {
              queueConfigurationsMap_ = other.queueConfigurationsMap_;
              bitField0_ = (bitField0_ & ~0x00000800);
            } else {
              ensureQueueConfigurationsMapIsMutable();
              queueConfigurationsMap_.addAll(other.queueConfigurationsMap_);
            }
            onChanged();
          }
        } else {
          if (!other.queueConfigurationsMap_.isEmpty()) {
            if (queueConfigurationsMapBuilder_.isEmpty()) {
              queueConfigurationsMapBuilder_.dispose();
              queueConfigurationsMapBuilder_ = null;
              queueConfigurationsMap_ = other.queueConfigurationsMap_;
              bitField0_ = (bitField0_ & ~0x00000800);
              queueConfigurationsMapBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getQueueConfigurationsMapFieldBuilder() : null;
            } else {
              queueConfigurationsMapBuilder_.addAllMessages(other.queueConfigurationsMap_);
            }
          }
        }
        if (other.hasIntraQueuePreemptionDisabled()) {
          setIntraQueuePreemptionDisabled(other.getIntraQueuePreemptionDisabled());
        }
        if (other.hasWeight()) {
          setWeight(other.getWeight());
        }
        if (other.hasQueuePath()) {
          queuePath_ = other.queuePath_;
          bitField0_ |= 0x00004000;
          onChanged();
        }
        if (other.hasMaxParallelApps()) {
          setMaxParallelApps(other.getMaxParallelApps());
        }
        if (other.hasSchedulerType()) {
          schedulerType_ = other.schedulerType_;
          bitField0_ |= 0x00010000;
          onChanged();
        }
        if (other.hasMinResourceVCore()) {
          setMinResourceVCore(other.getMinResourceVCore());
        }
        if (other.hasMinResourceMemory()) {
          setMinResourceMemory(other.getMinResourceMemory());
        }
        if (other.hasMaxResourceVCore()) {
          setMaxResourceVCore(other.getMaxResourceVCore());
        }
        if (other.hasMaxResourceMemory()) {
          setMaxResourceMemory(other.getMaxResourceMemory());
        }
        if (other.hasReservedResourceVCore()) {
          setReservedResourceVCore(other.getReservedResourceVCore());
        }
        if (other.hasReservedResourceMemory()) {
          setReservedResourceMemory(other.getReservedResourceMemory());
        }
        if (other.hasSteadyFairShareVCore()) {
          setSteadyFairShareVCore(other.getSteadyFairShareVCore());
        }
        if (other.hasSteadyFairShareMemory()) {
          setSteadyFairShareMemory(other.getSteadyFairShareMemory());
        }
        if (other.hasSubClusterId()) {
          subClusterId_ = other.subClusterId_;
          bitField0_ |= 0x02000000;
          onChanged();
        }
        if (other.hasMaxRunningApp()) {
          setMaxRunningApp(other.getMaxRunningApp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getChildQueuesCount(); i++) {
          if (!getChildQueues(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getApplicationsCount(); i++) {
          if (!getApplications(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getQueueConfigurationsMapCount(); i++) {
          if (!getQueueConfigurationsMap(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
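            // Each case label below is a precomputed wire tag,
            // (field_number << 3) | wire_type: e.g. case 10 is field 1 with
            // wire type 2 (length-delimited), case 21 is field 2 with wire
            // type 5 (fixed32 float), and case 40 is field 5 with wire type 0
            // (varint enum).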
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                queueName_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 21: {
                capacity_ = input.readFloat();
                bitField0_ |= 0x00000002;
                break;
              } // case 21
              case 29: {
                maximumCapacity_ = input.readFloat();
                bitField0_ |= 0x00000004;
                break;
              } // case 29
              case 37: {
                currentCapacity_ = input.readFloat();
                bitField0_ |= 0x00000008;
                break;
              } // case 37
              case 40: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(5, tmpRaw);
                } else {
                  state_ = tmpRaw;
                  bitField0_ |= 0x00000010;
                }
                break;
              } // case 40
              case 50: {
                org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.PARSER,
                        extensionRegistry);
                if (childQueuesBuilder_ == null) {
                  ensureChildQueuesIsMutable();
                  childQueues_.add(m);
                } else {
                  childQueuesBuilder_.addMessage(m);
                }
                break;
              } // case 50
              case 58: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.PARSER,
                        extensionRegistry);
                if (applicationsBuilder_ == null) {
                  ensureApplicationsIsMutable();
                  applications_.add(m);
                } else {
                  applicationsBuilder_.addMessage(m);
                }
                break;
              } // case 58
              case 66: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureAccessibleNodeLabelsIsMutable();
                accessibleNodeLabels_.add(bs);
                break;
              } // case 66
              case 74: {
                defaultNodeLabelExpression_ = input.readBytes();
                bitField0_ |= 0x00000100;
                break;
              } // case 74
              case 82: {
                input.readMessage(
                    getQueueStatisticsFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000200;
                break;
              } // case 82
              case 88: {
                preemptionDisabled_ = input.readBool();
                bitField0_ |= 0x00000400;
                break;
              } // case 88
              case 98: {
                org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.PARSER,
                        extensionRegistry);
                if (queueConfigurationsMapBuilder_ == null) {
                  ensureQueueConfigurationsMapIsMutable();
                  queueConfigurationsMap_.add(m);
                } else {
                  queueConfigurationsMapBuilder_.addMessage(m);
                }
                break;
              } // case 98
              case 104: {
                intraQueuePreemptionDisabled_ = input.readBool();
                bitField0_ |= 0x00001000;
                break;
              } // case 104
              case 117: {
                weight_ = input.readFloat();
                bitField0_ |= 0x00002000;
                break;
              } // case 117
              case 122: {
                queuePath_ = input.readBytes();
                bitField0_ |= 0x00004000;
                break;
              } // case 122
              case 128: {
                maxParallelApps_ = input.readInt32();
                bitField0_ |= 0x00008000;
                break;
              } // case 128
              case 138: {
                schedulerType_ = input.readBytes();
                bitField0_ |= 0x00010000;
                break;
              } // case 138
              case 144: {
                minResourceVCore_ = input.readInt32();
                bitField0_ |= 0x00020000;
                break;
              } // case 144
              case 152: {
                minResourceMemory_ = input.readInt64();
                bitField0_ |= 0x00040000;
                break;
              } // case 152
              case 160: {
                maxResourceVCore_ = input.readInt32();
                bitField0_ |= 0x00080000;
                break;
              } // case 160
              case 168: {
                maxResourceMemory_ = input.readInt64();
                bitField0_ |= 0x00100000;
                break;
              } // case 168
              case 176: {
                reservedResourceVCore_ = input.readInt32();
                bitField0_ |= 0x00200000;
                break;
              } // case 176
              case 184: {
                reservedResourceMemory_ = input.readInt64();
                bitField0_ |= 0x00400000;
                break;
              } // case 184
              case 192: {
                steadyFairShareVCore_ = input.readInt32();
                bitField0_ |= 0x00800000;
                break;
              } // case 192
              case 200: {
                steadyFairShareMemory_ = input.readInt64();
                bitField0_ |= 0x01000000;
                break;
              } // case 200
              case 210: {
                subClusterId_ = input.readBytes();
                bitField0_ |= 0x02000000;
                break;
              } // case 210
              case 216: {
                maxRunningApp_ = input.readInt32();
                bitField0_ |= 0x04000000;
                break;
              } // case 216
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;
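      // bitField0_ packs one "has" bit per optional field (plus the list
      // mutability bits noted above); the hasXxx() accessors below simply
      // test the corresponding bit.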

      private java.lang.Object queueName_ = "";
      /**
       * <code>optional string queueName = 1;</code>
       * @return Whether the queueName field is set.
       */
      public boolean hasQueueName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return The queueName.
       */
      public java.lang.String getQueueName() {
        java.lang.Object ref = queueName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queueName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
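      // queueName_ holds either a String or a ByteString; the first
      // getQueueName() call decodes the bytes and caches the String, but only
      // when the bytes are valid UTF-8.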
      /**
       * <code>optional string queueName = 1;</code>
       * @return The bytes for queueName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueNameBytes() {
        java.lang.Object ref = queueName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queueName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueueName() {
        queueName_ = getDefaultInstance().getQueueName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The bytes for queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private float capacity_;
      /**
       * <code>optional float capacity = 2;</code>
       * @return Whether the capacity field is set.
       */
      @java.lang.Override
      public boolean hasCapacity() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional float capacity = 2;</code>
       * @return The capacity.
       */
      @java.lang.Override
      public float getCapacity() {
        return capacity_;
      }
      /**
       * <code>optional float capacity = 2;</code>
       * @param value The capacity to set.
       * @return This builder for chaining.
       */
      public Builder setCapacity(float value) {
        capacity_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional float capacity = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearCapacity() {
        bitField0_ = (bitField0_ & ~0x00000002);
        capacity_ = 0F;
        onChanged();
        return this;
      }

      private float maximumCapacity_;
      /**
       * <code>optional float maximumCapacity = 3;</code>
       * @return Whether the maximumCapacity field is set.
       */
      @java.lang.Override
      public boolean hasMaximumCapacity() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional float maximumCapacity = 3;</code>
       * @return The maximumCapacity.
       */
      @java.lang.Override
      public float getMaximumCapacity() {
        return maximumCapacity_;
      }
      /**
       * <code>optional float maximumCapacity = 3;</code>
       * @param value The maximumCapacity to set.
       * @return This builder for chaining.
       */
      public Builder setMaximumCapacity(float value) {
        maximumCapacity_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional float maximumCapacity = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaximumCapacity() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maximumCapacity_ = 0F;
        onChanged();
        return this;
      }

      private float currentCapacity_;
      /**
       * <code>optional float currentCapacity = 4;</code>
       * @return Whether the currentCapacity field is set.
       */
      @java.lang.Override
      public boolean hasCurrentCapacity() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional float currentCapacity = 4;</code>
       * @return The currentCapacity.
       */
      @java.lang.Override
      public float getCurrentCapacity() {
        return currentCapacity_;
      }
      /**
       * <code>optional float currentCapacity = 4;</code>
       * @param value The currentCapacity to set.
       * @return This builder for chaining.
       */
      public Builder setCurrentCapacity(float value) {
        currentCapacity_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional float currentCapacity = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearCurrentCapacity() {
        bitField0_ = (bitField0_ & ~0x00000008);
        currentCapacity_ = 0F;
        onChanged();
        return this;
      }

      private int state_ = 1;
      /**
       * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
       * @return Whether the state field is set.
       */
      @java.lang.Override public boolean hasState() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
       * @return The state.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto getState() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.forNumber(state_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto.Q_STOPPED : result;
      }
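      // forNumber() returns null for unrecognized enum numbers, so getState()
      // substitutes Q_STOPPED rather than returning null.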
      /**
       * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
       * @param value The state to set.
       * @return This builder for chaining.
       */
      public Builder setState(org.apache.hadoop.yarn.proto.YarnProtos.QueueStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        state_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueStateProto state = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000010);
        state_ = 1;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> childQueues_ =
        java.util.Collections.emptyList();
      private void ensureChildQueuesIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          childQueues_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto>(childQueues_);
          bitField0_ |= 0x00000020;
        }
      }
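      // childQueues_ is copy-on-write: bit 0x00000020 records whether the
      // list is already a private mutable copy, so repeated mutations avoid
      // copying more than once.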

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> childQueuesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> getChildQueuesList() {
        if (childQueuesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(childQueues_);
        } else {
          return childQueuesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public int getChildQueuesCount() {
        if (childQueuesBuilder_ == null) {
          return childQueues_.size();
        } else {
          return childQueuesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getChildQueues(int index) {
        if (childQueuesBuilder_ == null) {
          return childQueues_.get(index);
        } else {
          return childQueuesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder setChildQueues(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) {
        if (childQueuesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildQueuesIsMutable();
          childQueues_.set(index, value);
          onChanged();
        } else {
          childQueuesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder setChildQueues(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) {
        if (childQueuesBuilder_ == null) {
          ensureChildQueuesIsMutable();
          childQueues_.set(index, builderForValue.build());
          onChanged();
        } else {
          childQueuesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder addChildQueues(org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) {
        if (childQueuesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildQueuesIsMutable();
          childQueues_.add(value);
          onChanged();
        } else {
          childQueuesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder addChildQueues(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto value) {
        if (childQueuesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildQueuesIsMutable();
          childQueues_.add(index, value);
          onChanged();
        } else {
          childQueuesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder addChildQueues(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) {
        if (childQueuesBuilder_ == null) {
          ensureChildQueuesIsMutable();
          childQueues_.add(builderForValue.build());
          onChanged();
        } else {
          childQueuesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder addChildQueues(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder builderForValue) {
        if (childQueuesBuilder_ == null) {
          ensureChildQueuesIsMutable();
          childQueues_.add(index, builderForValue.build());
          onChanged();
        } else {
          childQueuesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder addAllChildQueues(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto> values) {
        if (childQueuesBuilder_ == null) {
          ensureChildQueuesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, childQueues_);
          onChanged();
        } else {
          childQueuesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder clearChildQueues() {
        if (childQueuesBuilder_ == null) {
          childQueues_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          childQueuesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public Builder removeChildQueues(int index) {
        if (childQueuesBuilder_ == null) {
          ensureChildQueuesIsMutable();
          childQueues_.remove(index);
          onChanged();
        } else {
          childQueuesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder getChildQueuesBuilder(
          int index) {
        return getChildQueuesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder getChildQueuesOrBuilder(
          int index) {
        if (childQueuesBuilder_ == null) {
          return childQueues_.get(index);
        } else {
          return childQueuesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> 
           getChildQueuesOrBuilderList() {
        if (childQueuesBuilder_ != null) {
          return childQueuesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(childQueues_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder addChildQueuesBuilder() {
        return getChildQueuesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder addChildQueuesBuilder(
          int index) {
        return getChildQueuesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueInfoProto childQueues = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder> 
           getChildQueuesBuilderList() {
        return getChildQueuesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder> 
          getChildQueuesFieldBuilder() {
        if (childQueuesBuilder_ == null) {
          childQueuesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProtoOrBuilder>(
                  childQueues_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          childQueues_ = null;
        }
        return childQueuesBuilder_;
      }
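      // Once created, the RepeatedFieldBuilderV3 owns the element list:
      // childQueues_ is nulled and all further access goes through the
      // builder, which propagates change notifications to the parent builder.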

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> applications_ =
        java.util.Collections.emptyList();
      private void ensureApplicationsIsMutable() {
        if (!((bitField0_ & 0x00000040) != 0)) {
          applications_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto>(applications_);
          bitField0_ |= 0x00000040;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> applicationsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> getApplicationsList() {
        if (applicationsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applications_);
        } else {
          return applicationsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public int getApplicationsCount() {
        if (applicationsBuilder_ == null) {
          return applications_.size();
        } else {
          return applicationsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto getApplications(int index) {
        if (applicationsBuilder_ == null) {
          return applications_.get(index);
        } else {
          return applicationsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder setApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.set(index, value);
          onChanged();
        } else {
          applicationsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder setApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder addApplications(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.add(value);
          onChanged();
        } else {
          applicationsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder addApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto value) {
        if (applicationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationsIsMutable();
          applications_.add(index, value);
          onChanged();
        } else {
          applicationsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder addApplications(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.add(builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder addApplications(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder builderForValue) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder addAllApplications(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto> values) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applications_);
          onChanged();
        } else {
          applicationsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder clearApplications() {
        if (applicationsBuilder_ == null) {
          applications_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
        } else {
          applicationsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public Builder removeApplications(int index) {
        if (applicationsBuilder_ == null) {
          ensureApplicationsIsMutable();
          applications_.remove(index);
          onChanged();
        } else {
          applicationsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder getApplicationsBuilder(
          int index) {
        return getApplicationsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder getApplicationsOrBuilder(
          int index) {
        if (applicationsBuilder_ == null) {
          return applications_.get(index);
        } else {
          return applicationsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
           getApplicationsOrBuilderList() {
        if (applicationsBuilder_ != null) {
          return applicationsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applications_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder() {
        return getApplicationsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder addApplicationsBuilder(
          int index) {
        return getApplicationsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationReportProto applications = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder> 
           getApplicationsBuilderList() {
        return getApplicationsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder> 
          getApplicationsFieldBuilder() {
        if (applicationsBuilder_ == null) {
          applicationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationReportProtoOrBuilder>(
                  applications_,
                  ((bitField0_ & 0x00000040) != 0),
                  getParentForChildren(),
                  isClean());
          applications_ = null;
        }
        return applicationsBuilder_;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList accessibleNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureAccessibleNodeLabelsIsMutable() {
        if (!accessibleNodeLabels_.isModifiable()) {
          accessibleNodeLabels_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(accessibleNodeLabels_);
        }
        bitField0_ |= 0x00000080;
      }
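      // accessibleNodeLabels_ is a LazyStringArrayList, which keeps each
      // element as either a String or a ByteString and converts lazily on
      // access.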
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @return A list containing the accessibleNodeLabels.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getAccessibleNodeLabelsList() {
        accessibleNodeLabels_.makeImmutable();
        return accessibleNodeLabels_;
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @return The count of accessibleNodeLabels.
       */
      public int getAccessibleNodeLabelsCount() {
        return accessibleNodeLabels_.size();
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param index The index of the element to return.
       * @return The accessibleNodeLabels at the given index.
       */
      public java.lang.String getAccessibleNodeLabels(int index) {
        return accessibleNodeLabels_.get(index);
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param index The index of the value to return.
       * @return The bytes of the accessibleNodeLabels at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAccessibleNodeLabelsBytes(int index) {
        return accessibleNodeLabels_.getByteString(index);
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param index The index to set the value at.
       * @param value The accessibleNodeLabels to set.
       * @return This builder for chaining.
       */
      public Builder setAccessibleNodeLabels(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAccessibleNodeLabelsIsMutable();
        accessibleNodeLabels_.set(index, value);
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param value The accessibleNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAccessibleNodeLabels(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAccessibleNodeLabelsIsMutable();
        accessibleNodeLabels_.add(value);
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param values The accessibleNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAllAccessibleNodeLabels(
          java.lang.Iterable<java.lang.String> values) {
        ensureAccessibleNodeLabelsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, accessibleNodeLabels_);
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearAccessibleNodeLabels() {
        accessibleNodeLabels_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string accessibleNodeLabels = 8;</code>
       * @param value The bytes of the accessibleNodeLabels to add.
       * @return This builder for chaining.
       */
      public Builder addAccessibleNodeLabelsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAccessibleNodeLabelsIsMutable();
        accessibleNodeLabels_.add(value);
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
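
      // Implementation note: repeated string fields are backed by a
      // copy-on-write LazyStringArrayList: ensureAccessibleNodeLabelsIsMutable()
      // copies the list before the first mutation and records presence in
      // bitField0_, while getAccessibleNodeLabelsList() freezes the list before
      // handing it out. Illustrative sketch (label names are hypothetical):
      //
      //   QueueInfoProto.Builder qb = QueueInfoProto.newBuilder()
      //       .addAccessibleNodeLabels("gpu")
      //       .addAccessibleNodeLabels("ssd");
      //   int n = qb.getAccessibleNodeLabelsCount();  // 2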

      private java.lang.Object defaultNodeLabelExpression_ = "";
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @return Whether the defaultNodeLabelExpression field is set.
       */
      public boolean hasDefaultNodeLabelExpression() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @return The defaultNodeLabelExpression.
       */
      public java.lang.String getDefaultNodeLabelExpression() {
        java.lang.Object ref = defaultNodeLabelExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            defaultNodeLabelExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @return The bytes for defaultNodeLabelExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDefaultNodeLabelExpressionBytes() {
        java.lang.Object ref = defaultNodeLabelExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          defaultNodeLabelExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @param value The defaultNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setDefaultNodeLabelExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        defaultNodeLabelExpression_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @return This builder for chaining.
       */
      public Builder clearDefaultNodeLabelExpression() {
        defaultNodeLabelExpression_ = getDefaultInstance().getDefaultNodeLabelExpression();
        bitField0_ = (bitField0_ & ~0x00000100);
        onChanged();
        return this;
      }
      /**
       * <code>optional string defaultNodeLabelExpression = 9;</code>
       * @param value The bytes for defaultNodeLabelExpression to set.
       * @return This builder for chaining.
       */
      public Builder setDefaultNodeLabelExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        defaultNodeLabelExpression_ = value;
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
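
      // Implementation note: optional string fields are held as a
      // java.lang.Object so the value can live either as a decoded String or
      // as the raw UTF-8 ByteString read off the wire.
      // getDefaultNodeLabelExpression() decodes and caches the String form on
      // first access (caching only when the bytes are valid UTF-8), and
      // getDefaultNodeLabelExpressionBytes() caches the encoded form
      // symmetrically, so each conversion runs at most once per value.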

      private org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto queueStatistics_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder> queueStatisticsBuilder_;
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       * @return Whether the queueStatistics field is set.
       */
      public boolean hasQueueStatistics() {
        return ((bitField0_ & 0x00000200) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       * @return The queueStatistics.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto getQueueStatistics() {
        if (queueStatisticsBuilder_ == null) {
          return queueStatistics_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_;
        } else {
          return queueStatisticsBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public Builder setQueueStatistics(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto value) {
        if (queueStatisticsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          queueStatistics_ = value;
        } else {
          queueStatisticsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public Builder setQueueStatistics(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder builderForValue) {
        if (queueStatisticsBuilder_ == null) {
          queueStatistics_ = builderForValue.build();
        } else {
          queueStatisticsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000200;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public Builder mergeQueueStatistics(org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto value) {
        if (queueStatisticsBuilder_ == null) {
          if (((bitField0_ & 0x00000200) != 0) &&
            queueStatistics_ != null &&
            queueStatistics_ != org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance()) {
            getQueueStatisticsBuilder().mergeFrom(value);
          } else {
            queueStatistics_ = value;
          }
        } else {
          queueStatisticsBuilder_.mergeFrom(value);
        }
        if (queueStatistics_ != null) {
          bitField0_ |= 0x00000200;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public Builder clearQueueStatistics() {
        bitField0_ = (bitField0_ & ~0x00000200);
        queueStatistics_ = null;
        if (queueStatisticsBuilder_ != null) {
          queueStatisticsBuilder_.dispose();
          queueStatisticsBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder getQueueStatisticsBuilder() {
        bitField0_ |= 0x00000200;
        onChanged();
        return getQueueStatisticsFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder getQueueStatisticsOrBuilder() {
        if (queueStatisticsBuilder_ != null) {
          return queueStatisticsBuilder_.getMessageOrBuilder();
        } else {
          return queueStatistics_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.getDefaultInstance() : queueStatistics_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.QueueStatisticsProto queueStatistics = 10;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder> 
          getQueueStatisticsFieldBuilder() {
        if (queueStatisticsBuilder_ == null) {
          queueStatisticsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueStatisticsProtoOrBuilder>(
                  getQueueStatistics(),
                  getParentForChildren(),
                  isClean());
          queueStatistics_ = null;
        }
        return queueStatisticsBuilder_;
      }
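
      // Implementation note: the singular-message analogue of the repeated
      // pattern above. SingleFieldBuilderV3 is created on demand and seeded
      // with the current value via getQueueStatistics(); from then on the
      // field builder is the single source of truth and queueStatistics_ is
      // nulled. Minimal sketch of editing the nested message in place:
      //
      //   QueueInfoProto.Builder qb = QueueInfoProto.newBuilder();
      //   qb.getQueueStatisticsBuilder();   // lazily creates the field builder
      //   boolean set = qb.hasQueueStatistics();  // true: the getter above
      //                                           // also marks the field set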

      private boolean preemptionDisabled_;
      /**
       * <code>optional bool preemptionDisabled = 11;</code>
       * @return Whether the preemptionDisabled field is set.
       */
      @java.lang.Override
      public boolean hasPreemptionDisabled() {
        return ((bitField0_ & 0x00000400) != 0);
      }
      /**
       * <code>optional bool preemptionDisabled = 11;</code>
       * @return The preemptionDisabled.
       */
      @java.lang.Override
      public boolean getPreemptionDisabled() {
        return preemptionDisabled_;
      }
      /**
       * <code>optional bool preemptionDisabled = 11;</code>
       * @param value The preemptionDisabled to set.
       * @return This builder for chaining.
       */
      public Builder setPreemptionDisabled(boolean value) {
        preemptionDisabled_ = value;
        bitField0_ |= 0x00000400;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool preemptionDisabled = 11;</code>
       * @return This builder for chaining.
       */
      public Builder clearPreemptionDisabled() {
        bitField0_ = (bitField0_ & ~0x00000400);
        preemptionDisabled_ = false;
        onChanged();
        return this;
      }
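
      // Implementation note: the optional scalar fields in this builder all
      // follow one template: hasX() tests a dedicated bit in bitField0_
      // (0x00000400 for preemptionDisabled), setX() stores the value and sets
      // the bit, and clearX() resets both the bit and the field to its proto2
      // default (false, 0, 0L, or 0F). The masks advance one bit per field in
      // declaration order.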

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> queueConfigurationsMap_ =
        java.util.Collections.emptyList();
      private void ensureQueueConfigurationsMapIsMutable() {
        if (!((bitField0_ & 0x00000800) != 0)) {
          queueConfigurationsMap_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto>(queueConfigurationsMap_);
          bitField0_ |= 0x00000800;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> queueConfigurationsMapBuilder_;

      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> getQueueConfigurationsMapList() {
        if (queueConfigurationsMapBuilder_ == null) {
          return java.util.Collections.unmodifiableList(queueConfigurationsMap_);
        } else {
          return queueConfigurationsMapBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public int getQueueConfigurationsMapCount() {
        if (queueConfigurationsMapBuilder_ == null) {
          return queueConfigurationsMap_.size();
        } else {
          return queueConfigurationsMapBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getQueueConfigurationsMap(int index) {
        if (queueConfigurationsMapBuilder_ == null) {
          return queueConfigurationsMap_.get(index);
        } else {
          return queueConfigurationsMapBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder setQueueConfigurationsMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) {
        if (queueConfigurationsMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.set(index, value);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder setQueueConfigurationsMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) {
        if (queueConfigurationsMapBuilder_ == null) {
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.set(index, builderForValue.build());
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder addQueueConfigurationsMap(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) {
        if (queueConfigurationsMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.add(value);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder addQueueConfigurationsMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto value) {
        if (queueConfigurationsMapBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.add(index, value);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder addQueueConfigurationsMap(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) {
        if (queueConfigurationsMapBuilder_ == null) {
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.add(builderForValue.build());
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder addQueueConfigurationsMap(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder builderForValue) {
        if (queueConfigurationsMapBuilder_ == null) {
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.add(index, builderForValue.build());
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder addAllQueueConfigurationsMap(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto> values) {
        if (queueConfigurationsMapBuilder_ == null) {
          ensureQueueConfigurationsMapIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, queueConfigurationsMap_);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder clearQueueConfigurationsMap() {
        if (queueConfigurationsMapBuilder_ == null) {
          queueConfigurationsMap_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000800);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public Builder removeQueueConfigurationsMap(int index) {
        if (queueConfigurationsMapBuilder_ == null) {
          ensureQueueConfigurationsMapIsMutable();
          queueConfigurationsMap_.remove(index);
          onChanged();
        } else {
          queueConfigurationsMapBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder getQueueConfigurationsMapBuilder(
          int index) {
        return getQueueConfigurationsMapFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder getQueueConfigurationsMapOrBuilder(
          int index) {
        if (queueConfigurationsMapBuilder_ == null) {
          return queueConfigurationsMap_.get(index);
        } else {
          return queueConfigurationsMapBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> 
           getQueueConfigurationsMapOrBuilderList() {
        if (queueConfigurationsMapBuilder_ != null) {
          return queueConfigurationsMapBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(queueConfigurationsMap_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder addQueueConfigurationsMapBuilder() {
        return getQueueConfigurationsMapFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder addQueueConfigurationsMapBuilder(
          int index) {
        return getQueueConfigurationsMapFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.QueueConfigurationsMapProto queueConfigurationsMap = 12;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder> 
           getQueueConfigurationsMapBuilderList() {
        return getQueueConfigurationsMapFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder> 
          getQueueConfigurationsMapFieldBuilder() {
        if (queueConfigurationsMapBuilder_ == null) {
          queueConfigurationsMapBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder>(
                  queueConfigurationsMap_,
                  ((bitField0_ & 0x00000800) != 0),
                  getParentForChildren(),
                  isClean());
          queueConfigurationsMap_ = null;
        }
        return queueConfigurationsMapBuilder_;
      }
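
      // Implementation note: queueConfigurationsMap follows the same lazy
      // RepeatedFieldBuilderV3 migration as the applications field above,
      // keyed on presence bit 0x00000800.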

      private boolean intraQueuePreemptionDisabled_;
      /**
       * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
       * @return Whether the intraQueuePreemptionDisabled field is set.
       */
      @java.lang.Override
      public boolean hasIntraQueuePreemptionDisabled() {
        return ((bitField0_ & 0x00001000) != 0);
      }
      /**
       * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
       * @return The intraQueuePreemptionDisabled.
       */
      @java.lang.Override
      public boolean getIntraQueuePreemptionDisabled() {
        return intraQueuePreemptionDisabled_;
      }
      /**
       * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
       * @param value The intraQueuePreemptionDisabled to set.
       * @return This builder for chaining.
       */
      public Builder setIntraQueuePreemptionDisabled(boolean value) {
        intraQueuePreemptionDisabled_ = value;
        bitField0_ |= 0x00001000;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool intraQueuePreemptionDisabled = 13;</code>
       * @return This builder for chaining.
       */
      public Builder clearIntraQueuePreemptionDisabled() {
        bitField0_ = (bitField0_ & ~0x00001000);
        intraQueuePreemptionDisabled_ = false;
        onChanged();
        return this;
      }

      private float weight_;
      /**
       * <code>optional float weight = 14;</code>
       * @return Whether the weight field is set.
       */
      @java.lang.Override
      public boolean hasWeight() {
        return ((bitField0_ & 0x00002000) != 0);
      }
      /**
       * <code>optional float weight = 14;</code>
       * @return The weight.
       */
      @java.lang.Override
      public float getWeight() {
        return weight_;
      }
      /**
       * <code>optional float weight = 14;</code>
       * @param value The weight to set.
       * @return This builder for chaining.
       */
      public Builder setWeight(float value) {
        weight_ = value;
        bitField0_ |= 0x00002000;
        onChanged();
        return this;
      }
      /**
       * <code>optional float weight = 14;</code>
       * @return This builder for chaining.
       */
      public Builder clearWeight() {
        bitField0_ = (bitField0_ & ~0x00002000);
        weight_ = 0F;
        onChanged();
        return this;
      }

      private java.lang.Object queuePath_ = "";
      /**
       * <code>optional string queuePath = 15;</code>
       * @return Whether the queuePath field is set.
       */
      public boolean hasQueuePath() {
        return ((bitField0_ & 0x00004000) != 0);
      }
      /**
       * <code>optional string queuePath = 15;</code>
       * @return The queuePath.
       */
      public java.lang.String getQueuePath() {
        java.lang.Object ref = queuePath_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queuePath_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queuePath = 15;</code>
       * @return The bytes for queuePath.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueuePathBytes() {
        java.lang.Object ref = queuePath_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queuePath_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queuePath = 15;</code>
       * @param value The queuePath to set.
       * @return This builder for chaining.
       */
      public Builder setQueuePath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queuePath_ = value;
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queuePath = 15;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueuePath() {
        queuePath_ = getDefaultInstance().getQueuePath();
        bitField0_ = (bitField0_ & ~0x00004000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queuePath = 15;</code>
       * @param value The bytes for queuePath to set.
       * @return This builder for chaining.
       */
      public Builder setQueuePathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queuePath_ = value;
        bitField0_ |= 0x00004000;
        onChanged();
        return this;
      }

      private int maxParallelApps_;
      /**
       * <code>optional int32 maxParallelApps = 16;</code>
       * @return Whether the maxParallelApps field is set.
       */
      @java.lang.Override
      public boolean hasMaxParallelApps() {
        return ((bitField0_ & 0x00008000) != 0);
      }
      /**
       * <code>optional int32 maxParallelApps = 16;</code>
       * @return The maxParallelApps.
       */
      @java.lang.Override
      public int getMaxParallelApps() {
        return maxParallelApps_;
      }
      /**
       * <code>optional int32 maxParallelApps = 16;</code>
       * @param value The maxParallelApps to set.
       * @return This builder for chaining.
       */
      public Builder setMaxParallelApps(int value) {
        maxParallelApps_ = value;
        bitField0_ |= 0x00008000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 maxParallelApps = 16;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxParallelApps() {
        bitField0_ = (bitField0_ & ~0x00008000);
        maxParallelApps_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object schedulerType_ = "";
      /**
       * <code>optional string schedulerType = 17;</code>
       * @return Whether the schedulerType field is set.
       */
      public boolean hasSchedulerType() {
        return ((bitField0_ & 0x00010000) != 0);
      }
      /**
       * <code>optional string schedulerType = 17;</code>
       * @return The schedulerType.
       */
      public java.lang.String getSchedulerType() {
        java.lang.Object ref = schedulerType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            schedulerType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string schedulerType = 17;</code>
       * @return The bytes for schedulerType.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSchedulerTypeBytes() {
        java.lang.Object ref = schedulerType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          schedulerType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string schedulerType = 17;</code>
       * @param value The schedulerType to set.
       * @return This builder for chaining.
       */
      public Builder setSchedulerType(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        schedulerType_ = value;
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string schedulerType = 17;</code>
       * @return This builder for chaining.
       */
      public Builder clearSchedulerType() {
        schedulerType_ = getDefaultInstance().getSchedulerType();
        bitField0_ = (bitField0_ & ~0x00010000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string schedulerType = 17;</code>
       * @param value The bytes for schedulerType to set.
       * @return This builder for chaining.
       */
      public Builder setSchedulerTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        schedulerType_ = value;
        bitField0_ |= 0x00010000;
        onChanged();
        return this;
      }

      private int minResourceVCore_;
      /**
       * <code>optional int32 minResourceVCore = 18;</code>
       * @return Whether the minResourceVCore field is set.
       */
      @java.lang.Override
      public boolean hasMinResourceVCore() {
        return ((bitField0_ & 0x00020000) != 0);
      }
      /**
       * <code>optional int32 minResourceVCore = 18;</code>
       * @return The minResourceVCore.
       */
      @java.lang.Override
      public int getMinResourceVCore() {
        return minResourceVCore_;
      }
      /**
       * <code>optional int32 minResourceVCore = 18;</code>
       * @param value The minResourceVCore to set.
       * @return This builder for chaining.
       */
      public Builder setMinResourceVCore(int value) {
        minResourceVCore_ = value;
        bitField0_ |= 0x00020000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 minResourceVCore = 18;</code>
       * @return This builder for chaining.
       */
      public Builder clearMinResourceVCore() {
        bitField0_ = (bitField0_ & ~0x00020000);
        minResourceVCore_ = 0;
        onChanged();
        return this;
      }

      private long minResourceMemory_;
      /**
       * <code>optional int64 minResourceMemory = 19;</code>
       * @return Whether the minResourceMemory field is set.
       */
      @java.lang.Override
      public boolean hasMinResourceMemory() {
        return ((bitField0_ & 0x00040000) != 0);
      }
      /**
       * <code>optional int64 minResourceMemory = 19;</code>
       * @return The minResourceMemory.
       */
      @java.lang.Override
      public long getMinResourceMemory() {
        return minResourceMemory_;
      }
      /**
       * <code>optional int64 minResourceMemory = 19;</code>
       * @param value The minResourceMemory to set.
       * @return This builder for chaining.
       */
      public Builder setMinResourceMemory(long value) {
        minResourceMemory_ = value;
        bitField0_ |= 0x00040000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 minResourceMemory = 19;</code>
       * @return This builder for chaining.
       */
      public Builder clearMinResourceMemory() {
        bitField0_ = (bitField0_ & ~0x00040000);
        minResourceMemory_ = 0L;
        onChanged();
        return this;
      }

      private int maxResourceVCore_;
      /**
       * <code>optional int32 maxResourceVCore = 20;</code>
       * @return Whether the maxResourceVCore field is set.
       */
      @java.lang.Override
      public boolean hasMaxResourceVCore() {
        return ((bitField0_ & 0x00080000) != 0);
      }
      /**
       * <code>optional int32 maxResourceVCore = 20;</code>
       * @return The maxResourceVCore.
       */
      @java.lang.Override
      public int getMaxResourceVCore() {
        return maxResourceVCore_;
      }
      /**
       * <code>optional int32 maxResourceVCore = 20;</code>
       * @param value The maxResourceVCore to set.
       * @return This builder for chaining.
       */
      public Builder setMaxResourceVCore(int value) {
        maxResourceVCore_ = value;
        bitField0_ |= 0x00080000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 maxResourceVCore = 20;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxResourceVCore() {
        bitField0_ = (bitField0_ & ~0x00080000);
        maxResourceVCore_ = 0;
        onChanged();
        return this;
      }

      private long maxResourceMemory_;
      /**
       * <code>optional int64 maxResourceMemory = 21;</code>
       * @return Whether the maxResourceMemory field is set.
       */
      @java.lang.Override
      public boolean hasMaxResourceMemory() {
        return ((bitField0_ & 0x00100000) != 0);
      }
      /**
       * <code>optional int64 maxResourceMemory = 21;</code>
       * @return The maxResourceMemory.
       */
      @java.lang.Override
      public long getMaxResourceMemory() {
        return maxResourceMemory_;
      }
      /**
       * <code>optional int64 maxResourceMemory = 21;</code>
       * @param value The maxResourceMemory to set.
       * @return This builder for chaining.
       */
      public Builder setMaxResourceMemory(long value) {
        maxResourceMemory_ = value;
        bitField0_ |= 0x00100000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 maxResourceMemory = 21;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxResourceMemory() {
        bitField0_ = (bitField0_ & ~0x00100000);
        maxResourceMemory_ = 0L;
        onChanged();
        return this;
      }

      private int reservedResourceVCore_;
      /**
       * <code>optional int32 reservedResourceVCore = 22;</code>
       * @return Whether the reservedResourceVCore field is set.
       */
      @java.lang.Override
      public boolean hasReservedResourceVCore() {
        return ((bitField0_ & 0x00200000) != 0);
      }
      /**
       * <code>optional int32 reservedResourceVCore = 22;</code>
       * @return The reservedResourceVCore.
       */
      @java.lang.Override
      public int getReservedResourceVCore() {
        return reservedResourceVCore_;
      }
      /**
       * <code>optional int32 reservedResourceVCore = 22;</code>
       * @param value The reservedResourceVCore to set.
       * @return This builder for chaining.
       */
      public Builder setReservedResourceVCore(int value) {
        reservedResourceVCore_ = value;
        bitField0_ |= 0x00200000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 reservedResourceVCore = 22;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservedResourceVCore() {
        bitField0_ = (bitField0_ & ~0x00200000);
        reservedResourceVCore_ = 0;
        onChanged();
        return this;
      }

      private long reservedResourceMemory_;
      /**
       * <code>optional int64 reservedResourceMemory = 23;</code>
       * @return Whether the reservedResourceMemory field is set.
       */
      @java.lang.Override
      public boolean hasReservedResourceMemory() {
        return ((bitField0_ & 0x00400000) != 0);
      }
      /**
       * <code>optional int64 reservedResourceMemory = 23;</code>
       * @return The reservedResourceMemory.
       */
      @java.lang.Override
      public long getReservedResourceMemory() {
        return reservedResourceMemory_;
      }
      /**
       * <code>optional int64 reservedResourceMemory = 23;</code>
       * @param value The reservedResourceMemory to set.
       * @return This builder for chaining.
       */
      public Builder setReservedResourceMemory(long value) {
        reservedResourceMemory_ = value;
        bitField0_ |= 0x00400000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 reservedResourceMemory = 23;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservedResourceMemory() {
        bitField0_ = (bitField0_ & ~0x00400000);
        reservedResourceMemory_ = 0L;
        onChanged();
        return this;
      }

      private int steadyFairShareVCore_;
      /**
       * <code>optional int32 steadyFairShareVCore = 24;</code>
       * @return Whether the steadyFairShareVCore field is set.
       */
      @java.lang.Override
      public boolean hasSteadyFairShareVCore() {
        return ((bitField0_ & 0x00800000) != 0);
      }
      /**
       * <code>optional int32 steadyFairShareVCore = 24;</code>
       * @return The steadyFairShareVCore.
       */
      @java.lang.Override
      public int getSteadyFairShareVCore() {
        return steadyFairShareVCore_;
      }
      /**
       * <code>optional int32 steadyFairShareVCore = 24;</code>
       * @param value The steadyFairShareVCore to set.
       * @return This builder for chaining.
       */
      public Builder setSteadyFairShareVCore(int value) {
        steadyFairShareVCore_ = value;
        bitField0_ |= 0x00800000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 steadyFairShareVCore = 24;</code>
       * @return This builder for chaining.
       */
      public Builder clearSteadyFairShareVCore() {
        bitField0_ = (bitField0_ & ~0x00800000);
        steadyFairShareVCore_ = 0;
        onChanged();
        return this;
      }

      private long steadyFairShareMemory_;
      /**
       * <code>optional int64 steadyFairShareMemory = 25;</code>
       * @return Whether the steadyFairShareMemory field is set.
       */
      @java.lang.Override
      public boolean hasSteadyFairShareMemory() {
        return ((bitField0_ & 0x01000000) != 0);
      }
      /**
       * <code>optional int64 steadyFairShareMemory = 25;</code>
       * @return The steadyFairShareMemory.
       */
      @java.lang.Override
      public long getSteadyFairShareMemory() {
        return steadyFairShareMemory_;
      }
      /**
       * <code>optional int64 steadyFairShareMemory = 25;</code>
       * @param value The steadyFairShareMemory to set.
       * @return This builder for chaining.
       */
      public Builder setSteadyFairShareMemory(long value) {
        steadyFairShareMemory_ = value;
        bitField0_ |= 0x01000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 steadyFairShareMemory = 25;</code>
       * @return This builder for chaining.
       */
      public Builder clearSteadyFairShareMemory() {
        bitField0_ = (bitField0_ & ~0x01000000);
        steadyFairShareMemory_ = 0L;
        onChanged();
        return this;
      }

      private java.lang.Object subClusterId_ = "";
      /**
       * <code>optional string subClusterId = 26;</code>
       * @return Whether the subClusterId field is set.
       */
      public boolean hasSubClusterId() {
        return ((bitField0_ & 0x02000000) != 0);
      }
      /**
       * <code>optional string subClusterId = 26;</code>
       * @return The subClusterId.
       */
      public java.lang.String getSubClusterId() {
        java.lang.Object ref = subClusterId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subClusterId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 26;</code>
       * @return The bytes for subClusterId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubClusterIdBytes() {
        java.lang.Object ref = subClusterId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subClusterId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subClusterId = 26;</code>
       * @param value The subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x02000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 26;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubClusterId() {
        subClusterId_ = getDefaultInstance().getSubClusterId();
        bitField0_ = (bitField0_ & ~0x02000000);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subClusterId = 26;</code>
       * @param value The bytes for subClusterId to set.
       * @return This builder for chaining.
       */
      public Builder setSubClusterIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subClusterId_ = value;
        bitField0_ |= 0x02000000;
        onChanged();
        return this;
      }

      private int maxRunningApp_;
      /**
       * <code>optional int32 maxRunningApp = 27;</code>
       * @return Whether the maxRunningApp field is set.
       */
      @java.lang.Override
      public boolean hasMaxRunningApp() {
        return ((bitField0_ & 0x04000000) != 0);
      }
      /**
       * <code>optional int32 maxRunningApp = 27;</code>
       * @return The maxRunningApp.
       */
      @java.lang.Override
      public int getMaxRunningApp() {
        return maxRunningApp_;
      }
      /**
       * <code>optional int32 maxRunningApp = 27;</code>
       * @param value The maxRunningApp to set.
       * @return This builder for chaining.
       */
      public Builder setMaxRunningApp(int value) {
        maxRunningApp_ = value;
        bitField0_ |= 0x04000000;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 maxRunningApp = 27;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxRunningApp() {
        bitField0_ = (bitField0_ & ~0x04000000);
        maxRunningApp_ = 0;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueInfoProto>() {
      @java.lang.Override
      public QueueInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
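
    // Implementation note: PARSER funnels every static parse entry point
    // through Builder.mergeFrom and rewraps I/O and uninitialized-message
    // failures as InvalidProtocolBufferException, attaching the partially
    // built message. Illustrative round trip (a sketch; assumes the message
    // builds cleanly, i.e. no required fields are left unset):
    //
    //   byte[] bytes = QueueInfoProto.newBuilder().build().toByteArray();
    //   QueueInfoProto parsed = QueueInfoProto.parser().parseFrom(bytes);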

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface QueueConfigurationsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueConfigurationsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional float capacity = 1;</code>
     * @return Whether the capacity field is set.
     */
    boolean hasCapacity();
    /**
     * <code>optional float capacity = 1;</code>
     * @return The capacity.
     */
    float getCapacity();

    /**
     * <code>optional float absoluteCapacity = 2;</code>
     * @return Whether the absoluteCapacity field is set.
     */
    boolean hasAbsoluteCapacity();
    /**
     * <code>optional float absoluteCapacity = 2;</code>
     * @return The absoluteCapacity.
     */
    float getAbsoluteCapacity();

    /**
     * <code>optional float maxCapacity = 3;</code>
     * @return Whether the maxCapacity field is set.
     */
    boolean hasMaxCapacity();
    /**
     * <code>optional float maxCapacity = 3;</code>
     * @return The maxCapacity.
     */
    float getMaxCapacity();

    /**
     * <code>optional float absoluteMaxCapacity = 4;</code>
     * @return Whether the absoluteMaxCapacity field is set.
     */
    boolean hasAbsoluteMaxCapacity();
    /**
     * <code>optional float absoluteMaxCapacity = 4;</code>
     * @return The absoluteMaxCapacity.
     */
    float getAbsoluteMaxCapacity();

    /**
     * <code>optional float maxAMPercentage = 5;</code>
     * @return Whether the maxAMPercentage field is set.
     */
    boolean hasMaxAMPercentage();
    /**
     * <code>optional float maxAMPercentage = 5;</code>
     * @return The maxAMPercentage.
     */
    float getMaxAMPercentage();

    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     * @return Whether the effectiveMinCapacity field is set.
     */
    boolean hasEffectiveMinCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     * @return The effectiveMinCapacity.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     * @return Whether the effectiveMaxCapacity field is set.
     */
    boolean hasEffectiveMaxCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     * @return The effectiveMaxCapacity.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     * @return Whether the configuredMinCapacity field is set.
     */
    boolean hasConfiguredMinCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     * @return The configuredMinCapacity.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     * @return Whether the configuredMaxCapacity field is set.
     */
    boolean hasConfiguredMaxCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     * @return The configuredMaxCapacity.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity();
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder();
  }
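
  // Implementation note: both the generated QueueConfigurationsProto message
  // below and its Builder implement this OrBuilder interface, so read-only
  // code can accept a QueueConfigurationsProtoOrBuilder and work against
  // either a finished message or a builder still under construction without
  // copying.
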
  /**
   * Protobuf type {@code hadoop.yarn.QueueConfigurationsProto}
   */
  public static final class QueueConfigurationsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueConfigurationsProto)
      QueueConfigurationsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use QueueConfigurationsProto.newBuilder() to construct.
    private QueueConfigurationsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private QueueConfigurationsProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new QueueConfigurationsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder.class);
    }

    private int bitField0_;
    public static final int CAPACITY_FIELD_NUMBER = 1;
    private float capacity_ = 0F;
    /**
     * <code>optional float capacity = 1;</code>
     * @return Whether the capacity field is set.
     */
    @java.lang.Override
    public boolean hasCapacity() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional float capacity = 1;</code>
     * @return The capacity.
     */
    @java.lang.Override
    public float getCapacity() {
      return capacity_;
    }

    public static final int ABSOLUTECAPACITY_FIELD_NUMBER = 2;
    private float absoluteCapacity_ = 0F;
    /**
     * <code>optional float absoluteCapacity = 2;</code>
     * @return Whether the absoluteCapacity field is set.
     */
    @java.lang.Override
    public boolean hasAbsoluteCapacity() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional float absoluteCapacity = 2;</code>
     * @return The absoluteCapacity.
     */
    @java.lang.Override
    public float getAbsoluteCapacity() {
      return absoluteCapacity_;
    }

    public static final int MAXCAPACITY_FIELD_NUMBER = 3;
    private float maxCapacity_ = 0F;
    /**
     * <code>optional float maxCapacity = 3;</code>
     * @return Whether the maxCapacity field is set.
     */
    @java.lang.Override
    public boolean hasMaxCapacity() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional float maxCapacity = 3;</code>
     * @return The maxCapacity.
     */
    @java.lang.Override
    public float getMaxCapacity() {
      return maxCapacity_;
    }

    public static final int ABSOLUTEMAXCAPACITY_FIELD_NUMBER = 4;
    private float absoluteMaxCapacity_ = 0F;
    /**
     * <code>optional float absoluteMaxCapacity = 4;</code>
     * @return Whether the absoluteMaxCapacity field is set.
     */
    @java.lang.Override
    public boolean hasAbsoluteMaxCapacity() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional float absoluteMaxCapacity = 4;</code>
     * @return The absoluteMaxCapacity.
     */
    @java.lang.Override
    public float getAbsoluteMaxCapacity() {
      return absoluteMaxCapacity_;
    }

    public static final int MAXAMPERCENTAGE_FIELD_NUMBER = 5;
    private float maxAMPercentage_ = 0F;
    /**
     * <code>optional float maxAMPercentage = 5;</code>
     * @return Whether the maxAMPercentage field is set.
     */
    @java.lang.Override
    public boolean hasMaxAMPercentage() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional float maxAMPercentage = 5;</code>
     * @return The maxAMPercentage.
     */
    @java.lang.Override
    public float getMaxAMPercentage() {
      return maxAMPercentage_;
    }

    public static final int EFFECTIVEMINCAPACITY_FIELD_NUMBER = 6;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMinCapacity_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     * @return Whether the effectiveMinCapacity field is set.
     */
    @java.lang.Override
    public boolean hasEffectiveMinCapacity() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     * @return The effectiveMinCapacity.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity() {
      return effectiveMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder() {
      return effectiveMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
    }

    public static final int EFFECTIVEMAXCAPACITY_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMaxCapacity_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     * @return Whether the effectiveMaxCapacity field is set.
     */
    @java.lang.Override
    public boolean hasEffectiveMaxCapacity() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     * @return The effectiveMaxCapacity.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity() {
      return effectiveMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder() {
      return effectiveMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
    }

    public static final int CONFIGUREDMINCAPACITY_FIELD_NUMBER = 8;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMinCapacity_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     * @return Whether the configuredMinCapacity field is set.
     */
    @java.lang.Override
    public boolean hasConfiguredMinCapacity() {
      return ((bitField0_ & 0x00000080) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     * @return The configuredMinCapacity.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity() {
      return configuredMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder() {
      return configuredMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
    }

    public static final int CONFIGUREDMAXCAPACITY_FIELD_NUMBER = 9;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMaxCapacity_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     * @return Whether the configuredMaxCapacity field is set.
     */
    @java.lang.Override
    public boolean hasConfiguredMaxCapacity() {
      return ((bitField0_ & 0x00000100) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     * @return The configuredMaxCapacity.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity() {
      return configuredMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder() {
      return configuredMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasEffectiveMinCapacity()) {
        if (!getEffectiveMinCapacity().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasEffectiveMaxCapacity()) {
        if (!getEffectiveMaxCapacity().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasConfiguredMinCapacity()) {
        if (!getConfiguredMinCapacity().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasConfiguredMaxCapacity()) {
        if (!getConfiguredMaxCapacity().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
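    // Only the message-typed fields (6-9) are checked here: protoc emits these
    // nested isInitialized() checks only when the field's type transitively
    // declares required fields, which ResourceProto evidently does. The
    // optional float fields (1-5) can never be uninitialized. The result is
    // memoized in memoizedIsInitialized, so repeated calls are O(1).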

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeFloat(1, capacity_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeFloat(2, absoluteCapacity_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeFloat(3, maxCapacity_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeFloat(4, absoluteMaxCapacity_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeFloat(5, maxAMPercentage_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(6, getEffectiveMinCapacity());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(7, getEffectiveMaxCapacity());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        output.writeMessage(8, getConfiguredMinCapacity());
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        output.writeMessage(9, getConfiguredMaxCapacity());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(1, capacity_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(2, absoluteCapacity_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(3, maxCapacity_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(4, absoluteMaxCapacity_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeFloatSize(5, maxAMPercentage_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, getEffectiveMinCapacity());
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getEffectiveMaxCapacity());
      }
      if (((bitField0_ & 0x00000080) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, getConfiguredMinCapacity());
      }
      if (((bitField0_ & 0x00000100) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(9, getConfiguredMaxCapacity());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
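    // Serialized-size arithmetic: each set float field costs 5 bytes on the
    // wire (a 1-byte tag for field numbers 1-5 plus 4 payload bytes), while
    // each set message field costs its tag plus a length varint plus the
    // nested message's own size. For example, a message with only capacity set
    // serializes to 5 bytes: the tag 0x0D ((1 << 3) | wire type 5) followed by
    // the 4-byte little-endian float.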

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto) obj;

      if (hasCapacity() != other.hasCapacity()) return false;
      if (hasCapacity()) {
        if (java.lang.Float.floatToIntBits(getCapacity())
            != java.lang.Float.floatToIntBits(
                other.getCapacity())) return false;
      }
      if (hasAbsoluteCapacity() != other.hasAbsoluteCapacity()) return false;
      if (hasAbsoluteCapacity()) {
        if (java.lang.Float.floatToIntBits(getAbsoluteCapacity())
            != java.lang.Float.floatToIntBits(
                other.getAbsoluteCapacity())) return false;
      }
      if (hasMaxCapacity() != other.hasMaxCapacity()) return false;
      if (hasMaxCapacity()) {
        if (java.lang.Float.floatToIntBits(getMaxCapacity())
            != java.lang.Float.floatToIntBits(
                other.getMaxCapacity())) return false;
      }
      if (hasAbsoluteMaxCapacity() != other.hasAbsoluteMaxCapacity()) return false;
      if (hasAbsoluteMaxCapacity()) {
        if (java.lang.Float.floatToIntBits(getAbsoluteMaxCapacity())
            != java.lang.Float.floatToIntBits(
                other.getAbsoluteMaxCapacity())) return false;
      }
      if (hasMaxAMPercentage() != other.hasMaxAMPercentage()) return false;
      if (hasMaxAMPercentage()) {
        if (java.lang.Float.floatToIntBits(getMaxAMPercentage())
            != java.lang.Float.floatToIntBits(
                other.getMaxAMPercentage())) return false;
      }
      if (hasEffectiveMinCapacity() != other.hasEffectiveMinCapacity()) return false;
      if (hasEffectiveMinCapacity()) {
        if (!getEffectiveMinCapacity()
            .equals(other.getEffectiveMinCapacity())) return false;
      }
      if (hasEffectiveMaxCapacity() != other.hasEffectiveMaxCapacity()) return false;
      if (hasEffectiveMaxCapacity()) {
        if (!getEffectiveMaxCapacity()
            .equals(other.getEffectiveMaxCapacity())) return false;
      }
      if (hasConfiguredMinCapacity() != other.hasConfiguredMinCapacity()) return false;
      if (hasConfiguredMinCapacity()) {
        if (!getConfiguredMinCapacity()
            .equals(other.getConfiguredMinCapacity())) return false;
      }
      if (hasConfiguredMaxCapacity() != other.hasConfiguredMaxCapacity()) return false;
      if (hasConfiguredMaxCapacity()) {
        if (!getConfiguredMaxCapacity()
            .equals(other.getConfiguredMaxCapacity())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasCapacity()) {
        hash = (37 * hash) + CAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getCapacity());
      }
      if (hasAbsoluteCapacity()) {
        hash = (37 * hash) + ABSOLUTECAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getAbsoluteCapacity());
      }
      if (hasMaxCapacity()) {
        hash = (37 * hash) + MAXCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getMaxCapacity());
      }
      if (hasAbsoluteMaxCapacity()) {
        hash = (37 * hash) + ABSOLUTEMAXCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getAbsoluteMaxCapacity());
      }
      if (hasMaxAMPercentage()) {
        hash = (37 * hash) + MAXAMPERCENTAGE_FIELD_NUMBER;
        hash = (53 * hash) + java.lang.Float.floatToIntBits(
            getMaxAMPercentage());
      }
      if (hasEffectiveMinCapacity()) {
        hash = (37 * hash) + EFFECTIVEMINCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + getEffectiveMinCapacity().hashCode();
      }
      if (hasEffectiveMaxCapacity()) {
        hash = (37 * hash) + EFFECTIVEMAXCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + getEffectiveMaxCapacity().hashCode();
      }
      if (hasConfiguredMinCapacity()) {
        hash = (37 * hash) + CONFIGUREDMINCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + getConfiguredMinCapacity().hashCode();
      }
      if (hasConfiguredMaxCapacity()) {
        hash = (37 * hash) + CONFIGUREDMAXCAPACITY_FIELD_NUMBER;
        hash = (53 * hash) + getConfiguredMaxCapacity().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
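    // Floats are hashed via Float.floatToIntBits, matching the bitwise
    // comparison used in equals() above; floatToIntBits also canonicalizes
    // NaN, so messages that compare equal always hash identically.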

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
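    // Round-trip sketch using the overloads above (byte[] variant shown):
    //
    //   byte[] wire = conf.toByteArray();
    //   YarnProtos.QueueConfigurationsProto parsed =
    //       YarnProtos.QueueConfigurationsProto.parseFrom(wire);
    //   assert parsed.equals(conf);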

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
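    // toBuilder() special-cases DEFAULT_INSTANCE: merging from an all-default
    // message is a no-op, so a fresh Builder is returned without the
    // mergeFrom pass.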

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueConfigurationsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueConfigurationsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getEffectiveMinCapacityFieldBuilder();
          getEffectiveMaxCapacityFieldBuilder();
          getConfiguredMinCapacityFieldBuilder();
          getConfiguredMaxCapacityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        capacity_ = 0F;
        absoluteCapacity_ = 0F;
        maxCapacity_ = 0F;
        absoluteMaxCapacity_ = 0F;
        maxAMPercentage_ = 0F;
        effectiveMinCapacity_ = null;
        if (effectiveMinCapacityBuilder_ != null) {
          effectiveMinCapacityBuilder_.dispose();
          effectiveMinCapacityBuilder_ = null;
        }
        effectiveMaxCapacity_ = null;
        if (effectiveMaxCapacityBuilder_ != null) {
          effectiveMaxCapacityBuilder_.dispose();
          effectiveMaxCapacityBuilder_ = null;
        }
        configuredMinCapacity_ = null;
        if (configuredMinCapacityBuilder_ != null) {
          configuredMinCapacityBuilder_.dispose();
          configuredMinCapacityBuilder_ = null;
        }
        configuredMaxCapacity_ = null;
        if (configuredMaxCapacityBuilder_ != null) {
          configuredMaxCapacityBuilder_.dispose();
          configuredMaxCapacityBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.capacity_ = capacity_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.absoluteCapacity_ = absoluteCapacity_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.maxCapacity_ = maxCapacity_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.absoluteMaxCapacity_ = absoluteMaxCapacity_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.maxAMPercentage_ = maxAMPercentage_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.effectiveMinCapacity_ = effectiveMinCapacityBuilder_ == null
              ? effectiveMinCapacity_
              : effectiveMinCapacityBuilder_.build();
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.effectiveMaxCapacity_ = effectiveMaxCapacityBuilder_ == null
              ? effectiveMaxCapacity_
              : effectiveMaxCapacityBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.configuredMinCapacity_ = configuredMinCapacityBuilder_ == null
              ? configuredMinCapacity_
              : configuredMinCapacityBuilder_.build();
          to_bitField0_ |= 0x00000080;
        }
        if (((from_bitField0_ & 0x00000100) != 0)) {
          result.configuredMaxCapacity_ = configuredMaxCapacityBuilder_ == null
              ? configuredMaxCapacity_
              : configuredMaxCapacityBuilder_.build();
          to_bitField0_ |= 0x00000100;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance()) return this;
        if (other.hasCapacity()) {
          setCapacity(other.getCapacity());
        }
        if (other.hasAbsoluteCapacity()) {
          setAbsoluteCapacity(other.getAbsoluteCapacity());
        }
        if (other.hasMaxCapacity()) {
          setMaxCapacity(other.getMaxCapacity());
        }
        if (other.hasAbsoluteMaxCapacity()) {
          setAbsoluteMaxCapacity(other.getAbsoluteMaxCapacity());
        }
        if (other.hasMaxAMPercentage()) {
          setMaxAMPercentage(other.getMaxAMPercentage());
        }
        if (other.hasEffectiveMinCapacity()) {
          mergeEffectiveMinCapacity(other.getEffectiveMinCapacity());
        }
        if (other.hasEffectiveMaxCapacity()) {
          mergeEffectiveMaxCapacity(other.getEffectiveMaxCapacity());
        }
        if (other.hasConfiguredMinCapacity()) {
          mergeConfiguredMinCapacity(other.getConfiguredMinCapacity());
        }
        if (other.hasConfiguredMaxCapacity()) {
          mergeConfiguredMaxCapacity(other.getConfiguredMaxCapacity());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
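      // Standard proto2 merge semantics: set scalar fields in `other`
      // overwrite this builder's values, while set message fields are merged
      // recursively via the merge*Capacity helpers below.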

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasEffectiveMinCapacity()) {
          if (!getEffectiveMinCapacity().isInitialized()) {
            return false;
          }
        }
        if (hasEffectiveMaxCapacity()) {
          if (!getEffectiveMaxCapacity().isInitialized()) {
            return false;
          }
        }
        if (hasConfiguredMinCapacity()) {
          if (!getConfiguredMinCapacity().isInitialized()) {
            return false;
          }
        }
        if (hasConfiguredMaxCapacity()) {
          if (!getConfiguredMaxCapacity().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 13: {
                capacity_ = input.readFloat();
                bitField0_ |= 0x00000001;
                break;
              } // case 13
              case 21: {
                absoluteCapacity_ = input.readFloat();
                bitField0_ |= 0x00000002;
                break;
              } // case 21
              case 29: {
                maxCapacity_ = input.readFloat();
                bitField0_ |= 0x00000004;
                break;
              } // case 29
              case 37: {
                absoluteMaxCapacity_ = input.readFloat();
                bitField0_ |= 0x00000008;
                break;
              } // case 37
              case 45: {
                maxAMPercentage_ = input.readFloat();
                bitField0_ |= 0x00000010;
                break;
              } // case 45
              case 50: {
                input.readMessage(
                    getEffectiveMinCapacityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              case 58: {
                input.readMessage(
                    getEffectiveMaxCapacityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 66: {
                input.readMessage(
                    getConfiguredMinCapacityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              case 74: {
                input.readMessage(
                    getConfiguredMaxCapacityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000100;
                break;
              } // case 74
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
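      // The case labels above are precomputed wire tags:
      // (field_number << 3) | wire_type. E.g. case 13 = (1 << 3) | 5 (32-bit
      // float `capacity`) and case 50 = (6 << 3) | 2 (length-delimited
      // `effectiveMinCapacity`). A tag of 0 (case 0) marks end of input.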
      private int bitField0_;

      private float capacity_;
      /**
       * <code>optional float capacity = 1;</code>
       * @return Whether the capacity field is set.
       */
      @java.lang.Override
      public boolean hasCapacity() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional float capacity = 1;</code>
       * @return The capacity.
       */
      @java.lang.Override
      public float getCapacity() {
        return capacity_;
      }
      /**
       * <code>optional float capacity = 1;</code>
       * @param value The capacity to set.
       * @return This builder for chaining.
       */
      public Builder setCapacity(float value) {
        capacity_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional float capacity = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearCapacity() {
        bitField0_ = (bitField0_ & ~0x00000001);
        capacity_ = 0F;
        onChanged();
        return this;
      }

      private float absoluteCapacity_;
      /**
       * <code>optional float absoluteCapacity = 2;</code>
       * @return Whether the absoluteCapacity field is set.
       */
      @java.lang.Override
      public boolean hasAbsoluteCapacity() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional float absoluteCapacity = 2;</code>
       * @return The absoluteCapacity.
       */
      @java.lang.Override
      public float getAbsoluteCapacity() {
        return absoluteCapacity_;
      }
      /**
       * <code>optional float absoluteCapacity = 2;</code>
       * @param value The absoluteCapacity to set.
       * @return This builder for chaining.
       */
      public Builder setAbsoluteCapacity(float value) {
        absoluteCapacity_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional float absoluteCapacity = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAbsoluteCapacity() {
        bitField0_ = (bitField0_ & ~0x00000002);
        absoluteCapacity_ = 0F;
        onChanged();
        return this;
      }

      private float maxCapacity_;
      /**
       * <code>optional float maxCapacity = 3;</code>
       * @return Whether the maxCapacity field is set.
       */
      @java.lang.Override
      public boolean hasMaxCapacity() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional float maxCapacity = 3;</code>
       * @return The maxCapacity.
       */
      @java.lang.Override
      public float getMaxCapacity() {
        return maxCapacity_;
      }
      /**
       * <code>optional float maxCapacity = 3;</code>
       * @param value The maxCapacity to set.
       * @return This builder for chaining.
       */
      public Builder setMaxCapacity(float value) {
        maxCapacity_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional float maxCapacity = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxCapacity() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maxCapacity_ = 0F;
        onChanged();
        return this;
      }

      private float absoluteMaxCapacity_;
      /**
       * <code>optional float absoluteMaxCapacity = 4;</code>
       * @return Whether the absoluteMaxCapacity field is set.
       */
      @java.lang.Override
      public boolean hasAbsoluteMaxCapacity() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional float absoluteMaxCapacity = 4;</code>
       * @return The absoluteMaxCapacity.
       */
      @java.lang.Override
      public float getAbsoluteMaxCapacity() {
        return absoluteMaxCapacity_;
      }
      /**
       * <code>optional float absoluteMaxCapacity = 4;</code>
       * @param value The absoluteMaxCapacity to set.
       * @return This builder for chaining.
       */
      public Builder setAbsoluteMaxCapacity(float value) {
        absoluteMaxCapacity_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional float absoluteMaxCapacity = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearAbsoluteMaxCapacity() {
        bitField0_ = (bitField0_ & ~0x00000008);
        absoluteMaxCapacity_ = 0F;
        onChanged();
        return this;
      }

      private float maxAMPercentage_;
      /**
       * <code>optional float maxAMPercentage = 5;</code>
       * @return Whether the maxAMPercentage field is set.
       */
      @java.lang.Override
      public boolean hasMaxAMPercentage() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional float maxAMPercentage = 5;</code>
       * @return The maxAMPercentage.
       */
      @java.lang.Override
      public float getMaxAMPercentage() {
        return maxAMPercentage_;
      }
      /**
       * <code>optional float maxAMPercentage = 5;</code>
       * @param value The maxAMPercentage to set.
       * @return This builder for chaining.
       */
      public Builder setMaxAMPercentage(float value) {
        maxAMPercentage_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional float maxAMPercentage = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxAMPercentage() {
        bitField0_ = (bitField0_ & ~0x00000010);
        maxAMPercentage_ = 0F;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMinCapacity_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> effectiveMinCapacityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       * @return Whether the effectiveMinCapacity field is set.
       */
      public boolean hasEffectiveMinCapacity() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       * @return The effectiveMinCapacity.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMinCapacity() {
        if (effectiveMinCapacityBuilder_ == null) {
          return effectiveMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
        } else {
          return effectiveMinCapacityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public Builder setEffectiveMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (effectiveMinCapacityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          effectiveMinCapacity_ = value;
        } else {
          effectiveMinCapacityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public Builder setEffectiveMinCapacity(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (effectiveMinCapacityBuilder_ == null) {
          effectiveMinCapacity_ = builderForValue.build();
        } else {
          effectiveMinCapacityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public Builder mergeEffectiveMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (effectiveMinCapacityBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0) &&
            effectiveMinCapacity_ != null &&
            effectiveMinCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getEffectiveMinCapacityBuilder().mergeFrom(value);
          } else {
            effectiveMinCapacity_ = value;
          }
        } else {
          effectiveMinCapacityBuilder_.mergeFrom(value);
        }
        if (effectiveMinCapacity_ != null) {
          bitField0_ |= 0x00000020;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public Builder clearEffectiveMinCapacity() {
        bitField0_ = (bitField0_ & ~0x00000020);
        effectiveMinCapacity_ = null;
        if (effectiveMinCapacityBuilder_ != null) {
          effectiveMinCapacityBuilder_.dispose();
          effectiveMinCapacityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getEffectiveMinCapacityBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getEffectiveMinCapacityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMinCapacityOrBuilder() {
        if (effectiveMinCapacityBuilder_ != null) {
          return effectiveMinCapacityBuilder_.getMessageOrBuilder();
        } else {
          return effectiveMinCapacity_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMinCapacity_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMinCapacity = 6;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getEffectiveMinCapacityFieldBuilder() {
        if (effectiveMinCapacityBuilder_ == null) {
          effectiveMinCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getEffectiveMinCapacity(),
                  getParentForChildren(),
                  isClean());
          effectiveMinCapacity_ = null;
        }
        return effectiveMinCapacityBuilder_;
      }
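      // Lazy field-builder pattern: once the SingleFieldBuilderV3 is created
      // it takes ownership of the current message value, and the raw
      // effectiveMinCapacity_ field is nulled so there is a single source of
      // truth. The same pattern repeats for the three message fields below.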

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto effectiveMaxCapacity_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> effectiveMaxCapacityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       * @return Whether the effectiveMaxCapacity field is set.
       */
      public boolean hasEffectiveMaxCapacity() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       * @return The effectiveMaxCapacity.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getEffectiveMaxCapacity() {
        if (effectiveMaxCapacityBuilder_ == null) {
          return effectiveMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
        } else {
          return effectiveMaxCapacityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public Builder setEffectiveMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (effectiveMaxCapacityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          effectiveMaxCapacity_ = value;
        } else {
          effectiveMaxCapacityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public Builder setEffectiveMaxCapacity(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (effectiveMaxCapacityBuilder_ == null) {
          effectiveMaxCapacity_ = builderForValue.build();
        } else {
          effectiveMaxCapacityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public Builder mergeEffectiveMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (effectiveMaxCapacityBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            effectiveMaxCapacity_ != null &&
            effectiveMaxCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getEffectiveMaxCapacityBuilder().mergeFrom(value);
          } else {
            effectiveMaxCapacity_ = value;
          }
        } else {
          effectiveMaxCapacityBuilder_.mergeFrom(value);
        }
        if (effectiveMaxCapacity_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public Builder clearEffectiveMaxCapacity() {
        bitField0_ = (bitField0_ & ~0x00000040);
        effectiveMaxCapacity_ = null;
        if (effectiveMaxCapacityBuilder_ != null) {
          effectiveMaxCapacityBuilder_.dispose();
          effectiveMaxCapacityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getEffectiveMaxCapacityBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getEffectiveMaxCapacityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getEffectiveMaxCapacityOrBuilder() {
        if (effectiveMaxCapacityBuilder_ != null) {
          return effectiveMaxCapacityBuilder_.getMessageOrBuilder();
        } else {
          return effectiveMaxCapacity_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : effectiveMaxCapacity_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto effectiveMaxCapacity = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getEffectiveMaxCapacityFieldBuilder() {
        if (effectiveMaxCapacityBuilder_ == null) {
          effectiveMaxCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getEffectiveMaxCapacity(),
                  getParentForChildren(),
                  isClean());
          effectiveMaxCapacity_ = null;
        }
        return effectiveMaxCapacityBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMinCapacity_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> configuredMinCapacityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       * @return Whether the configuredMinCapacity field is set.
       */
      public boolean hasConfiguredMinCapacity() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       * @return The configuredMinCapacity.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMinCapacity() {
        if (configuredMinCapacityBuilder_ == null) {
          return configuredMinCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
        } else {
          return configuredMinCapacityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public Builder setConfiguredMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (configuredMinCapacityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          configuredMinCapacity_ = value;
        } else {
          configuredMinCapacityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public Builder setConfiguredMinCapacity(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (configuredMinCapacityBuilder_ == null) {
          configuredMinCapacity_ = builderForValue.build();
        } else {
          configuredMinCapacityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public Builder mergeConfiguredMinCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (configuredMinCapacityBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0) &&
            configuredMinCapacity_ != null &&
            configuredMinCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getConfiguredMinCapacityBuilder().mergeFrom(value);
          } else {
            configuredMinCapacity_ = value;
          }
        } else {
          configuredMinCapacityBuilder_.mergeFrom(value);
        }
        if (configuredMinCapacity_ != null) {
          bitField0_ |= 0x00000080;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public Builder clearConfiguredMinCapacity() {
        bitField0_ = (bitField0_ & ~0x00000080);
        configuredMinCapacity_ = null;
        if (configuredMinCapacityBuilder_ != null) {
          configuredMinCapacityBuilder_.dispose();
          configuredMinCapacityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getConfiguredMinCapacityBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getConfiguredMinCapacityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMinCapacityOrBuilder() {
        if (configuredMinCapacityBuilder_ != null) {
          return configuredMinCapacityBuilder_.getMessageOrBuilder();
        } else {
          return configuredMinCapacity_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMinCapacity_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMinCapacity = 8;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getConfiguredMinCapacityFieldBuilder() {
        if (configuredMinCapacityBuilder_ == null) {
          configuredMinCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getConfiguredMinCapacity(),
                  getParentForChildren(),
                  isClean());
          configuredMinCapacity_ = null;
        }
        return configuredMinCapacityBuilder_;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto configuredMaxCapacity_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> configuredMaxCapacityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       * @return Whether the configuredMaxCapacity field is set.
       */
      public boolean hasConfiguredMaxCapacity() {
        return ((bitField0_ & 0x00000100) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       * @return The configuredMaxCapacity.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getConfiguredMaxCapacity() {
        if (configuredMaxCapacityBuilder_ == null) {
          return configuredMaxCapacity_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
        } else {
          return configuredMaxCapacityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public Builder setConfiguredMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (configuredMaxCapacityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          configuredMaxCapacity_ = value;
        } else {
          configuredMaxCapacityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public Builder setConfiguredMaxCapacity(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (configuredMaxCapacityBuilder_ == null) {
          configuredMaxCapacity_ = builderForValue.build();
        } else {
          configuredMaxCapacityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public Builder mergeConfiguredMaxCapacity(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (configuredMaxCapacityBuilder_ == null) {
          if (((bitField0_ & 0x00000100) != 0) &&
            configuredMaxCapacity_ != null &&
            configuredMaxCapacity_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getConfiguredMaxCapacityBuilder().mergeFrom(value);
          } else {
            configuredMaxCapacity_ = value;
          }
        } else {
          configuredMaxCapacityBuilder_.mergeFrom(value);
        }
        if (configuredMaxCapacity_ != null) {
          bitField0_ |= 0x00000100;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public Builder clearConfiguredMaxCapacity() {
        bitField0_ = (bitField0_ & ~0x00000100);
        configuredMaxCapacity_ = null;
        if (configuredMaxCapacityBuilder_ != null) {
          configuredMaxCapacityBuilder_.dispose();
          configuredMaxCapacityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getConfiguredMaxCapacityBuilder() {
        bitField0_ |= 0x00000100;
        onChanged();
        return getConfiguredMaxCapacityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getConfiguredMaxCapacityOrBuilder() {
        if (configuredMaxCapacityBuilder_ != null) {
          return configuredMaxCapacityBuilder_.getMessageOrBuilder();
        } else {
          return configuredMaxCapacity_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : configuredMaxCapacity_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto configuredMaxCapacity = 9;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getConfiguredMaxCapacityFieldBuilder() {
        if (configuredMaxCapacityBuilder_ == null) {
          configuredMaxCapacityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getConfiguredMaxCapacity(),
                  getParentForChildren(),
                  isClean());
          configuredMaxCapacity_ = null;
        }
        return configuredMaxCapacityBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueConfigurationsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueConfigurationsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueConfigurationsProto>() {
      @java.lang.Override
      public QueueConfigurationsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
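  // Illustrative usage sketch for QueueConfigurationsProto, kept as a
  // comment so this generated file still compiles. It uses only APIs
  // declared above; buildPartial() is chosen because the sketch does not
  // set every field, and any field declared required elsewhere in this
  // message would make build() throw:
  //
  //   YarnProtos.QueueConfigurationsProto queueConf =
  //       YarnProtos.QueueConfigurationsProto.newBuilder()
  //           .setConfiguredMinCapacity(
  //               YarnProtos.ResourceProto.getDefaultInstance())
  //           .setConfiguredMaxCapacity(
  //               YarnProtos.ResourceProto.getDefaultInstance())
  //           .buildPartial();
  //   assert queueConf.hasConfiguredMinCapacity();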

  public interface QueueConfigurationsMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueConfigurationsMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string partitionName = 1;</code>
     * @return Whether the partitionName field is set.
     */
    boolean hasPartitionName();
    /**
     * <code>required string partitionName = 1;</code>
     * @return The partitionName.
     */
    java.lang.String getPartitionName();
    /**
     * <code>required string partitionName = 1;</code>
     * @return The bytes for partitionName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getPartitionNameBytes();

    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     * @return Whether the queueConfigurations field is set.
     */
    boolean hasQueueConfigurations();
    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     * @return The queueConfigurations.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations();
    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.QueueConfigurationsMapProto}
   */
  public static final class QueueConfigurationsMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueConfigurationsMapProto)
      QueueConfigurationsMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use QueueConfigurationsMapProto.newBuilder() to construct.
    private QueueConfigurationsMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private QueueConfigurationsMapProto() {
      partitionName_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new QueueConfigurationsMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int PARTITIONNAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object partitionName_ = "";
    /**
     * <code>required string partitionName = 1;</code>
     * @return Whether the partitionName field is set.
     */
    @java.lang.Override
    public boolean hasPartitionName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string partitionName = 1;</code>
     * @return The partitionName.
     */
    @java.lang.Override
    public java.lang.String getPartitionName() {
      java.lang.Object ref = partitionName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          partitionName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string partitionName = 1;</code>
     * @return The bytes for partitionName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getPartitionNameBytes() {
      java.lang.Object ref = partitionName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        partitionName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
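    // partitionName_ holds either a java.lang.String or a ByteString. The
    // first String access above decodes the UTF-8 bytes and, when they are
    // valid UTF-8, caches the decoded String in place; getPartitionNameBytes()
    // performs the reverse conversion and caches the ByteString, so repeated
    // reads in either representation stay cheap.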

    public static final int QUEUECONFIGURATIONS_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto queueConfigurations_;
    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     * @return Whether the queueConfigurations field is set.
     */
    @java.lang.Override
    public boolean hasQueueConfigurations() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     * @return The queueConfigurations.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations() {
      return queueConfigurations_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_;
    }
    /**
     * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder() {
      return queueConfigurations_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasPartitionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (hasQueueConfigurations()) {
        if (!getQueueConfigurations().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
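    // Initialization is memoized in a single byte: -1 means not yet
    // computed, 0 means a required field is missing, 1 means the check
    // passed. partitionName is the required field here; queueConfigurations
    // is only validated recursively when it is actually set.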

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, partitionName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getQueueConfigurations());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, partitionName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getQueueConfigurations());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto) obj;

      if (hasPartitionName() != other.hasPartitionName()) return false;
      if (hasPartitionName()) {
        if (!getPartitionName()
            .equals(other.getPartitionName())) return false;
      }
      if (hasQueueConfigurations() != other.hasQueueConfigurations()) return false;
      if (hasQueueConfigurations()) {
        if (!getQueueConfigurations()
            .equals(other.getQueueConfigurations())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPartitionName()) {
        hash = (37 * hash) + PARTITIONNAME_FIELD_NUMBER;
        hash = (53 * hash) + getPartitionName().hashCode();
      }
      if (hasQueueConfigurations()) {
        hash = (37 * hash) + QUEUECONFIGURATIONS_FIELD_NUMBER;
        hash = (53 * hash) + getQueueConfigurations().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueConfigurationsMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueConfigurationsMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getQueueConfigurationsFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        partitionName_ = "";
        queueConfigurations_ = null;
        if (queueConfigurationsBuilder_ != null) {
          queueConfigurationsBuilder_.dispose();
          queueConfigurationsBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.partitionName_ = partitionName_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.queueConfigurations_ = queueConfigurationsBuilder_ == null
              ? queueConfigurations_
              : queueConfigurationsBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto.getDefaultInstance()) return this;
        if (other.hasPartitionName()) {
          partitionName_ = other.partitionName_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasQueueConfigurations()) {
          mergeQueueConfigurations(other.getQueueConfigurations());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasPartitionName()) {
          return false;
        }
        if (hasQueueConfigurations()) {
          if (!getQueueConfigurations().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                partitionName_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getQueueConfigurationsFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
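      // The tag values in the parse loop above follow the protobuf wire
      // format: tag = (field_number << 3) | wire_type. Case 10 is field 1
      // (partitionName, wire type 2 = length-delimited), case 18 is field 2
      // (queueConfigurations, also length-delimited), and tag 0 marks the
      // end of the stream.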
      private int bitField0_;

      private java.lang.Object partitionName_ = "";
      /**
       * <code>required string partitionName = 1;</code>
       * @return Whether the partitionName field is set.
       */
      public boolean hasPartitionName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string partitionName = 1;</code>
       * @return The partitionName.
       */
      public java.lang.String getPartitionName() {
        java.lang.Object ref = partitionName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            partitionName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string partitionName = 1;</code>
       * @return The bytes for partitionName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getPartitionNameBytes() {
        java.lang.Object ref = partitionName_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          partitionName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string partitionName = 1;</code>
       * @param value The partitionName to set.
       * @return This builder for chaining.
       */
      public Builder setPartitionName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        partitionName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string partitionName = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearPartitionName() {
        partitionName_ = getDefaultInstance().getPartitionName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string partitionName = 1;</code>
       * @param value The bytes for partitionName to set.
       * @return This builder for chaining.
       */
      public Builder setPartitionNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        partitionName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto queueConfigurations_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder> queueConfigurationsBuilder_;
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       * @return Whether the queueConfigurations field is set.
       */
      public boolean hasQueueConfigurations() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       * @return The queueConfigurations.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto getQueueConfigurations() {
        if (queueConfigurationsBuilder_ == null) {
          return queueConfigurations_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_;
        } else {
          return queueConfigurationsBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public Builder setQueueConfigurations(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto value) {
        if (queueConfigurationsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          queueConfigurations_ = value;
        } else {
          queueConfigurationsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public Builder setQueueConfigurations(
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder builderForValue) {
        if (queueConfigurationsBuilder_ == null) {
          queueConfigurations_ = builderForValue.build();
        } else {
          queueConfigurationsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public Builder mergeQueueConfigurations(org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto value) {
        if (queueConfigurationsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            queueConfigurations_ != null &&
            queueConfigurations_ != org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance()) {
            getQueueConfigurationsBuilder().mergeFrom(value);
          } else {
            queueConfigurations_ = value;
          }
        } else {
          queueConfigurationsBuilder_.mergeFrom(value);
        }
        if (queueConfigurations_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public Builder clearQueueConfigurations() {
        bitField0_ = (bitField0_ & ~0x00000002);
        queueConfigurations_ = null;
        if (queueConfigurationsBuilder_ != null) {
          queueConfigurationsBuilder_.dispose();
          queueConfigurationsBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder getQueueConfigurationsBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getQueueConfigurationsFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder getQueueConfigurationsOrBuilder() {
        if (queueConfigurationsBuilder_ != null) {
          return queueConfigurationsBuilder_.getMessageOrBuilder();
        } else {
          return queueConfigurations_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.getDefaultInstance() : queueConfigurations_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.QueueConfigurationsProto queueConfigurations = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder> 
          getQueueConfigurationsFieldBuilder() {
        if (queueConfigurationsBuilder_ == null) {
          queueConfigurationsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsProtoOrBuilder>(
                  getQueueConfigurations(),
                  getParentForChildren(),
                  isClean());
          queueConfigurations_ = null;
        }
        return queueConfigurationsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueConfigurationsMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueConfigurationsMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueConfigurationsMapProto>() {
      @java.lang.Override
      public QueueConfigurationsMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueConfigurationsMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueConfigurationsMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
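  // Illustrative round-trip sketch for QueueConfigurationsMapProto, kept as
  // a comment so this generated file still compiles. Only APIs declared
  // above plus the inherited toByteArray() are used; partitionName is the
  // message's sole required field, so build() succeeds here:
  //
  //   YarnProtos.QueueConfigurationsMapProto entry =
  //       YarnProtos.QueueConfigurationsMapProto.newBuilder()
  //           .setPartitionName("default")
  //           .build();
  //   byte[] wire = entry.toByteArray();
  //   YarnProtos.QueueConfigurationsMapProto parsed =
  //       YarnProtos.QueueConfigurationsMapProto.parseFrom(wire);
  //   assert parsed.getPartitionName().equals("default");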

  public interface QueueUserACLInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.QueueUserACLInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    boolean hasQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    java.lang.String getQueueName();
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes();

    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @return A list containing the userAcls.
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList();
    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @return The count of userAcls.
     */
    int getUserAclsCount();
    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @param index The index of the element to return.
     * @return The userAcls at the given index.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.QueueUserACLInfoProto}
   */
  public static final class QueueUserACLInfoProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.QueueUserACLInfoProto)
      QueueUserACLInfoProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use QueueUserACLInfoProto.newBuilder() to construct.
    private QueueUserACLInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private QueueUserACLInfoProto() {
      queueName_ = "";
      userAcls_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new QueueUserACLInfoProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder.class);
    }

    private int bitField0_;
    public static final int QUEUENAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object queueName_ = "";
    /**
     * <code>optional string queueName = 1;</code>
     * @return Whether the queueName field is set.
     */
    @java.lang.Override
    public boolean hasQueueName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The queueName.
     */
    @java.lang.Override
    public java.lang.String getQueueName() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          queueName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string queueName = 1;</code>
     * @return The bytes for queueName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getQueueNameBytes() {
      java.lang.Object ref = queueName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        queueName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int USERACLS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<java.lang.Integer> userAcls_;
    private static final org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
        java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> userAcls_converter_ =
            new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter.Converter<
                java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>() {
              public org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto convert(java.lang.Integer from) {
                org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto result = org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(from);
                return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.QACL_SUBMIT_APPLICATIONS : result;
              }
            };
    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @return A list containing the userAcls.
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList() {
      return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
          java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>(userAcls_, userAcls_converter_);
    }
    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @return The count of userAcls.
     */
    @java.lang.Override
    public int getUserAclsCount() {
      return userAcls_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
     * @param index The index of the element to return.
     * @return The userAcls at the given index.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index) {
      return userAcls_converter_.convert(userAcls_.get(index));
    }
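    // Repeated enum fields are stored as a List<Integer> of raw wire
    // numbers; userAcls_converter_ maps each number back to a QueueACLProto
    // constant on access, substituting QACL_SUBMIT_APPLICATIONS when the
    // number is unknown to this version of the enum.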

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, queueName_);
      }
      for (int i = 0; i < userAcls_.size(); i++) {
        output.writeEnum(2, userAcls_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, queueName_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < userAcls_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeEnumSizeNoTag(userAcls_.get(i));
        }
        size += dataSize;
        size += 1 * userAcls_.size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
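    // Each userAcls entry above contributes its varint-encoded enum value
    // plus exactly one tag byte (the "1 * userAcls_.size()" term), because
    // the tag for field 2 with wire type 0 fits in a single byte.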

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto other = (org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto) obj;

      if (hasQueueName() != other.hasQueueName()) return false;
      if (hasQueueName()) {
        if (!getQueueName()
            .equals(other.getQueueName())) return false;
      }
      if (!userAcls_.equals(other.userAcls_)) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasQueueName()) {
        hash = (37 * hash) + QUEUENAME_FIELD_NUMBER;
        hash = (53 * hash) + getQueueName().hashCode();
      }
      if (getUserAclsCount() > 0) {
        hash = (37 * hash) + USERACLS_FIELD_NUMBER;
        hash = (53 * hash) + userAcls_.hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.QueueUserACLInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.QueueUserACLInfoProto)
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        queueName_ = "";
        userAcls_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result) {
        if (((bitField0_ & 0x00000002) != 0)) {
          userAcls_ = java.util.Collections.unmodifiableList(userAcls_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.userAcls_ = userAcls_;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.queueName_ = queueName_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto.getDefaultInstance()) return this;
        if (other.hasQueueName()) {
          queueName_ = other.queueName_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (!other.userAcls_.isEmpty()) {
          if (userAcls_.isEmpty()) {
            userAcls_ = other.userAcls_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureUserAclsIsMutable();
            userAcls_.addAll(other.userAcls_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                queueName_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  ensureUserAclsIsMutable();
                  userAcls_.add(tmpRaw);
                }
                break;
              } // case 16
              case 18: {
                int length = input.readRawVarint32();
                int oldLimit = input.pushLimit(length);
                while(input.getBytesUntilLimit() > 0) {
                  int tmpRaw = input.readEnum();
                  org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto tmpValue =
                      org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto.forNumber(tmpRaw);
                  if (tmpValue == null) {
                    mergeUnknownVarintField(2, tmpRaw);
                  } else {
                    ensureUserAclsIsMutable();
                    userAcls_.add(tmpRaw);
                  }
                }
                input.popLimit(oldLimit);
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
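      /*
       * For readers of the switch above: a protobuf tag is
       * (fieldNumber << 3) | wireType. So case 10 = (1 << 3) | 2 is field 1
       * (queueName) as a length-delimited value, case 16 = (2 << 3) | 0 is
       * field 2 (userAcls) as a single unpacked varint, and case 18 =
       * (2 << 3) | 2 is field 2 as a packed, length-delimited run of
       * varints. Enum numbers without a QueueACLProto constant are kept as
       * unknown varint fields rather than dropped.
       */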
      private int bitField0_;

      private java.lang.Object queueName_ = "";
      /**
       * <code>optional string queueName = 1;</code>
       * @return Whether the queueName field is set.
       */
      public boolean hasQueueName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return The queueName.
       */
      public java.lang.String getQueueName() {
        java.lang.Object ref = queueName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            queueName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return The bytes for queueName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getQueueNameBytes() {
        java.lang.Object ref = queueName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          queueName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearQueueName() {
        queueName_ = getDefaultInstance().getQueueName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string queueName = 1;</code>
       * @param value The bytes for queueName to set.
       * @return This builder for chaining.
       */
      public Builder setQueueNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        queueName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<java.lang.Integer> userAcls_ =
        java.util.Collections.emptyList();
      private void ensureUserAclsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          userAcls_ = new java.util.ArrayList<java.lang.Integer>(userAcls_);
          bitField0_ |= 0x00000002;
        }
      }
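      /*
       * userAcls_ holds raw enum numbers in a copy-on-write list: bit
       * 0x00000002 of bitField0_ records whether this builder owns a mutable
       * ArrayList. Until the first mutation the list may be shared with a
       * merged-from message, so every mutator calls ensureUserAclsIsMutable()
       * first; reads convert numbers back to QueueACLProto via the shared
       * userAcls_converter_.
       */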
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @return A list containing the userAcls.
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> getUserAclsList() {
        return new org.apache.hadoop.thirdparty.protobuf.Internal.ListAdapter<
            java.lang.Integer, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto>(userAcls_, userAcls_converter_);
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @return The count of userAcls.
       */
      public int getUserAclsCount() {
        return userAcls_.size();
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @param index The index of the element to return.
       * @return The userAcls at the given index.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto getUserAcls(int index) {
        return userAcls_converter_.convert(userAcls_.get(index));
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @param index The index to set the value at.
       * @param value The userAcls to set.
       * @return This builder for chaining.
       */
      public Builder setUserAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureUserAclsIsMutable();
        userAcls_.set(index, value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @param value The userAcls to add.
       * @return This builder for chaining.
       */
      public Builder addUserAcls(org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureUserAclsIsMutable();
        userAcls_.add(value.getNumber());
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @param values The userAcls to add.
       * @return This builder for chaining.
       */
      public Builder addAllUserAcls(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto> values) {
        ensureUserAclsIsMutable();
        for (org.apache.hadoop.yarn.proto.YarnProtos.QueueACLProto value : values) {
          userAcls_.add(value.getNumber());
        }
        onChanged();
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.QueueACLProto userAcls = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearUserAcls() {
        userAcls_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.QueueUserACLInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.QueueUserACLInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<QueueUserACLInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<QueueUserACLInfoProto>() {
      @java.lang.Override
      public QueueUserACLInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
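    /*
     * The public PARSER field is deprecated in favor of parser(), but is kept
     * for source compatibility. parsePartialFrom() converts every failure
     * into an InvalidProtocolBufferException that carries whatever was
     * decoded so far via setUnfinishedMessage().
     */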

    public static org.apache.hadoop.thirdparty.protobuf.Parser<QueueUserACLInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<QueueUserACLInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.QueueUserACLInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
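  /*
   * Illustrative use of QueueUserACLInfoProto (a sketch; it assumes the
   * QueueACLProto enum constants defined elsewhere in this file, e.g.
   * QACL_SUBMIT_APPLICATIONS):
   *
   *   YarnProtos.QueueUserACLInfoProto acls =
   *       YarnProtos.QueueUserACLInfoProto.newBuilder()
   *           .setQueueName("root.default")
   *           .addUserAcls(YarnProtos.QueueACLProto.QACL_SUBMIT_APPLICATIONS)
   *           .build();
   */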

  public interface PlacementConstraintProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     * @return Whether the simpleConstraint field is set.
     */
    boolean hasSimpleConstraint();
    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     * @return The simpleConstraint.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint();
    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder();

    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     * @return Whether the compositeConstraint field is set.
     */
    boolean hasCompositeConstraint();
    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     * @return The compositeConstraint.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint();
    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder();
  }
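  /*
   * PlacementConstraintProto is effectively a two-way union: a constraint is
   * either a SimplePlacementConstraintProto or a
   * CompositePlacementConstraintProto. The message does not use a proto
   * oneof, so both fields are plain optionals; by convention callers set
   * exactly one and readers discriminate with hasSimpleConstraint() /
   * hasCompositeConstraint().
   */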
  /**
   * Protobuf type {@code hadoop.yarn.PlacementConstraintProto}
   */
  public static final class PlacementConstraintProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintProto)
      PlacementConstraintProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PlacementConstraintProto.newBuilder() to construct.
    private PlacementConstraintProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PlacementConstraintProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PlacementConstraintProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder.class);
    }

    private int bitField0_;
    public static final int SIMPLECONSTRAINT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto simpleConstraint_;
    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     * @return Whether the simpleConstraint field is set.
     */
    @java.lang.Override
    public boolean hasSimpleConstraint() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     * @return The simpleConstraint.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint() {
      return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_;
    }
    /**
     * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder() {
      return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_;
    }

    public static final int COMPOSITECONSTRAINT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto compositeConstraint_;
    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     * @return Whether the compositeConstraint field is set.
     */
    @java.lang.Override
    public boolean hasCompositeConstraint() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     * @return The compositeConstraint.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint() {
      return compositeConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_;
    }
    /**
     * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder() {
      return compositeConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasSimpleConstraint()) {
        if (!getSimpleConstraint().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasCompositeConstraint()) {
        if (!getCompositeConstraint().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getSimpleConstraint());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getCompositeConstraint());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getSimpleConstraint());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getCompositeConstraint());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto) obj;

      if (hasSimpleConstraint() != other.hasSimpleConstraint()) return false;
      if (hasSimpleConstraint()) {
        if (!getSimpleConstraint()
            .equals(other.getSimpleConstraint())) return false;
      }
      if (hasCompositeConstraint() != other.hasCompositeConstraint()) return false;
      if (hasCompositeConstraint()) {
        if (!getCompositeConstraint()
            .equals(other.getCompositeConstraint())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSimpleConstraint()) {
        hash = (37 * hash) + SIMPLECONSTRAINT_FIELD_NUMBER;
        hash = (53 * hash) + getSimpleConstraint().hashCode();
      }
      if (hasCompositeConstraint()) {
        hash = (37 * hash) + COMPOSITECONSTRAINT_FIELD_NUMBER;
        hash = (53 * hash) + getCompositeConstraint().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
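    /*
     * equals() compares presence bits before values, so an unset field never
     * matches a field explicitly set to its default. hashCode() folds each
     * set field's number and value hash in with the 37/53 multipliers and is
     * memoized, which is safe because messages are immutable once built.
     */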

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.PlacementConstraintProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getSimpleConstraintFieldBuilder();
          getCompositeConstraintFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        simpleConstraint_ = null;
        if (simpleConstraintBuilder_ != null) {
          simpleConstraintBuilder_.dispose();
          simpleConstraintBuilder_ = null;
        }
        compositeConstraint_ = null;
        if (compositeConstraintBuilder_ != null) {
          compositeConstraintBuilder_.dispose();
          compositeConstraintBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.simpleConstraint_ = simpleConstraintBuilder_ == null
              ? simpleConstraint_
              : simpleConstraintBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.compositeConstraint_ = compositeConstraintBuilder_ == null
              ? compositeConstraint_
              : compositeConstraintBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) return this;
        if (other.hasSimpleConstraint()) {
          mergeSimpleConstraint(other.getSimpleConstraint());
        }
        if (other.hasCompositeConstraint()) {
          mergeCompositeConstraint(other.getCompositeConstraint());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasSimpleConstraint()) {
          if (!getSimpleConstraint().isInitialized()) {
            return false;
          }
        }
        if (hasCompositeConstraint()) {
          if (!getCompositeConstraint().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getSimpleConstraintFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getCompositeConstraintFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto simpleConstraint_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder> simpleConstraintBuilder_;
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       * @return Whether the simpleConstraint field is set.
       */
      public boolean hasSimpleConstraint() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       * @return The simpleConstraint.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getSimpleConstraint() {
        if (simpleConstraintBuilder_ == null) {
          return simpleConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_;
        } else {
          return simpleConstraintBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public Builder setSimpleConstraint(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto value) {
        if (simpleConstraintBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          simpleConstraint_ = value;
        } else {
          simpleConstraintBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public Builder setSimpleConstraint(
          org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder builderForValue) {
        if (simpleConstraintBuilder_ == null) {
          simpleConstraint_ = builderForValue.build();
        } else {
          simpleConstraintBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public Builder mergeSimpleConstraint(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto value) {
        if (simpleConstraintBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            simpleConstraint_ != null &&
            simpleConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance()) {
            getSimpleConstraintBuilder().mergeFrom(value);
          } else {
            simpleConstraint_ = value;
          }
        } else {
          simpleConstraintBuilder_.mergeFrom(value);
        }
        if (simpleConstraint_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public Builder clearSimpleConstraint() {
        bitField0_ = (bitField0_ & ~0x00000001);
        simpleConstraint_ = null;
        if (simpleConstraintBuilder_ != null) {
          simpleConstraintBuilder_.dispose();
          simpleConstraintBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder getSimpleConstraintBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getSimpleConstraintFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder getSimpleConstraintOrBuilder() {
        if (simpleConstraintBuilder_ != null) {
          return simpleConstraintBuilder_.getMessageOrBuilder();
        } else {
          return simpleConstraint_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance() : simpleConstraint_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.SimplePlacementConstraintProto simpleConstraint = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder> 
          getSimpleConstraintFieldBuilder() {
        if (simpleConstraintBuilder_ == null) {
          simpleConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder>(
                  getSimpleConstraint(),
                  getParentForChildren(),
                  isClean());
          simpleConstraint_ = null;
        }
        return simpleConstraintBuilder_;
      }
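      /*
       * Lazy single-field-builder pattern: the sub-message lives in
       * simpleConstraint_ until a nested builder is requested; then a
       * SingleFieldBuilderV3 takes ownership (seeded with the current value)
       * and simpleConstraint_ is nulled, so exactly one of the two
       * representations is live at a time.
       * getCompositeConstraintFieldBuilder() below follows the same scheme.
       */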

      private org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto compositeConstraint_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder> compositeConstraintBuilder_;
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       * @return Whether the compositeConstraint field is set.
       */
      public boolean hasCompositeConstraint() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       * @return The compositeConstraint.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getCompositeConstraint() {
        if (compositeConstraintBuilder_ == null) {
          return compositeConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_;
        } else {
          return compositeConstraintBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public Builder setCompositeConstraint(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto value) {
        if (compositeConstraintBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          compositeConstraint_ = value;
        } else {
          compositeConstraintBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public Builder setCompositeConstraint(
          org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder builderForValue) {
        if (compositeConstraintBuilder_ == null) {
          compositeConstraint_ = builderForValue.build();
        } else {
          compositeConstraintBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public Builder mergeCompositeConstraint(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto value) {
        if (compositeConstraintBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            compositeConstraint_ != null &&
            compositeConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance()) {
            getCompositeConstraintBuilder().mergeFrom(value);
          } else {
            compositeConstraint_ = value;
          }
        } else {
          compositeConstraintBuilder_.mergeFrom(value);
        }
        if (compositeConstraint_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public Builder clearCompositeConstraint() {
        bitField0_ = (bitField0_ & ~0x00000002);
        compositeConstraint_ = null;
        if (compositeConstraintBuilder_ != null) {
          compositeConstraintBuilder_.dispose();
          compositeConstraintBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder getCompositeConstraintBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getCompositeConstraintFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder getCompositeConstraintOrBuilder() {
        if (compositeConstraintBuilder_ != null) {
          return compositeConstraintBuilder_.getMessageOrBuilder();
        } else {
          return compositeConstraint_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance() : compositeConstraint_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.CompositePlacementConstraintProto compositeConstraint = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder> 
          getCompositeConstraintFieldBuilder() {
        if (compositeConstraintBuilder_ == null) {
          compositeConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder>(
                  getCompositeConstraint(),
                  getParentForChildren(),
                  isClean());
          compositeConstraint_ = null;
        }
        return compositeConstraintBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PlacementConstraintProto>() {
      @java.lang.Override
      public PlacementConstraintProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
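  /*
   * Illustrative construction of a placement constraint (a sketch; the
   * "node" scope and the cardinality values are hypothetical, not constants
   * defined here):
   *
   *   YarnProtos.SimplePlacementConstraintProto simple =
   *       YarnProtos.SimplePlacementConstraintProto.newBuilder()
   *           .setScope("node")   // scope is required: build() throws if unset
   *           .setMinCardinality(0)
   *           .setMaxCardinality(1)
   *           .build();
   *   YarnProtos.PlacementConstraintProto constraint =
   *       YarnProtos.PlacementConstraintProto.newBuilder()
   *           .setSimpleConstraint(simple)
   *           .build();
   */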

  public interface SimplePlacementConstraintProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.SimplePlacementConstraintProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string scope = 1;</code>
     * @return Whether the scope field is set.
     */
    boolean hasScope();
    /**
     * <code>required string scope = 1;</code>
     * @return The scope.
     */
    java.lang.String getScope();
    /**
     * <code>required string scope = 1;</code>
     * @return The bytes for scope.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getScopeBytes();

    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> 
        getTargetExpressionsList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index);
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    int getTargetExpressionsCount();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> 
        getTargetExpressionsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder(
        int index);

    /**
     * <code>optional int32 minCardinality = 3;</code>
     * @return Whether the minCardinality field is set.
     */
    boolean hasMinCardinality();
    /**
     * <code>optional int32 minCardinality = 3;</code>
     * @return The minCardinality.
     */
    int getMinCardinality();

    /**
     * <code>optional int32 maxCardinality = 4;</code>
     * @return Whether the maxCardinality field is set.
     */
    boolean hasMaxCardinality();
    /**
     * <code>optional int32 maxCardinality = 4;</code>
     * @return The maxCardinality.
     */
    int getMaxCardinality();

    /**
     * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
     * @return Whether the attributeOpCode field is set.
     */
    boolean hasAttributeOpCode();
    /**
     * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
     * @return The attributeOpCode.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode();
  }
  /**
   * Protobuf type {@code hadoop.yarn.SimplePlacementConstraintProto}
   */
  public static final class SimplePlacementConstraintProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.SimplePlacementConstraintProto)
      SimplePlacementConstraintProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use SimplePlacementConstraintProto.newBuilder() to construct.
    private SimplePlacementConstraintProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private SimplePlacementConstraintProto() {
      scope_ = "";
      targetExpressions_ = java.util.Collections.emptyList();
      attributeOpCode_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new SimplePlacementConstraintProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder.class);
    }

    private int bitField0_;
    public static final int SCOPE_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object scope_ = "";
    /**
     * <code>required string scope = 1;</code>
     * @return Whether the scope field is set.
     */
    @java.lang.Override
    public boolean hasScope() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string scope = 1;</code>
     * @return The scope.
     */
    @java.lang.Override
    public java.lang.String getScope() {
      java.lang.Object ref = scope_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          scope_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string scope = 1;</code>
     * @return The bytes for scope.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getScopeBytes() {
      java.lang.Object ref = scope_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        scope_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
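
    // Note on the accessors above: scope_ holds either a java.lang.String or
    // a ByteString. getScope() lazily decodes the bytes and caches the
    // decoded String back into scope_ (only when the bytes are valid UTF-8),
    // while getScopeBytes() converts in the other direction and caches the
    // ByteString. The field is volatile, so the one-time swap publishes
    // safely across threads without locking.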

    public static final int TARGETEXPRESSIONS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> targetExpressions_;
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> getTargetExpressionsList() {
      return targetExpressions_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> 
        getTargetExpressionsOrBuilderList() {
      return targetExpressions_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    @java.lang.Override
    public int getTargetExpressionsCount() {
      return targetExpressions_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index) {
      return targetExpressions_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder(
        int index) {
      return targetExpressions_.get(index);
    }

    public static final int MINCARDINALITY_FIELD_NUMBER = 3;
    private int minCardinality_ = 0;
    /**
     * <code>optional int32 minCardinality = 3;</code>
     * @return Whether the minCardinality field is set.
     */
    @java.lang.Override
    public boolean hasMinCardinality() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 minCardinality = 3;</code>
     * @return The minCardinality.
     */
    @java.lang.Override
    public int getMinCardinality() {
      return minCardinality_;
    }

    public static final int MAXCARDINALITY_FIELD_NUMBER = 4;
    private int maxCardinality_ = 0;
    /**
     * <code>optional int32 maxCardinality = 4;</code>
     * @return Whether the maxCardinality field is set.
     */
    @java.lang.Override
    public boolean hasMaxCardinality() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 maxCardinality = 4;</code>
     * @return The maxCardinality.
     */
    @java.lang.Override
    public int getMaxCardinality() {
      return maxCardinality_;
    }

    public static final int ATTRIBUTEOPCODE_FIELD_NUMBER = 5;
    private int attributeOpCode_ = 1;
    /**
     * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
     * @return Whether the attributeOpCode field is set.
     */
    @java.lang.Override public boolean hasAttributeOpCode() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
     * @return The attributeOpCode.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode() {
      org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(attributeOpCode_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.NO_OP : result;
    }
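
    // The enum field is stored as its raw wire number rather than as the
    // enum constant; getAttributeOpCode() maps it back through forNumber()
    // and falls back to NO_OP (the field's declared default) should the
    // stored number not match any known NodeAttributeOpCodeProto entry.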

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasScope()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getTargetExpressionsCount(); i++) {
        if (!getTargetExpressions(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
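
    // memoizedIsInitialized is a tri-state byte: -1 means "not yet
    // computed", 0 means "known uninitialized", 1 means "known initialized".
    // This message is initialized only when the required scope field is set
    // and every nested targetExpressions element is itself initialized.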

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, scope_);
      }
      for (int i = 0; i < targetExpressions_.size(); i++) {
        output.writeMessage(2, targetExpressions_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(3, minCardinality_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(4, maxCardinality_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeEnum(5, attributeOpCode_);
      }
      getUnknownFields().writeTo(output);
    }
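
    // writeTo() emits fields in ascending field-number order, consulting the
    // presence bits in bitField0_ so unset optional fields are skipped
    // entirely; getSerializedSize() below mirrors the same checks and caches
    // its result in memoizedSize.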

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, scope_);
      }
      for (int i = 0; i < targetExpressions_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, targetExpressions_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, minCardinality_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, maxCardinality_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(5, attributeOpCode_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto other = (org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto) obj;

      if (hasScope() != other.hasScope()) return false;
      if (hasScope()) {
        if (!getScope()
            .equals(other.getScope())) return false;
      }
      if (!getTargetExpressionsList()
          .equals(other.getTargetExpressionsList())) return false;
      if (hasMinCardinality() != other.hasMinCardinality()) return false;
      if (hasMinCardinality()) {
        if (getMinCardinality()
            != other.getMinCardinality()) return false;
      }
      if (hasMaxCardinality() != other.hasMaxCardinality()) return false;
      if (hasMaxCardinality()) {
        if (getMaxCardinality()
            != other.getMaxCardinality()) return false;
      }
      if (hasAttributeOpCode() != other.hasAttributeOpCode()) return false;
      if (hasAttributeOpCode()) {
        if (attributeOpCode_ != other.attributeOpCode_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasScope()) {
        hash = (37 * hash) + SCOPE_FIELD_NUMBER;
        hash = (53 * hash) + getScope().hashCode();
      }
      if (getTargetExpressionsCount() > 0) {
        hash = (37 * hash) + TARGETEXPRESSIONS_FIELD_NUMBER;
        hash = (53 * hash) + getTargetExpressionsList().hashCode();
      }
      if (hasMinCardinality()) {
        hash = (37 * hash) + MINCARDINALITY_FIELD_NUMBER;
        hash = (53 * hash) + getMinCardinality();
      }
      if (hasMaxCardinality()) {
        hash = (37 * hash) + MAXCARDINALITY_FIELD_NUMBER;
        hash = (53 * hash) + getMaxCardinality();
      }
      if (hasAttributeOpCode()) {
        hash = (37 * hash) + ATTRIBUTEOPCODE_FIELD_NUMBER;
        hash = (53 * hash) + attributeOpCode_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
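
    // hashCode() folds each present field into the hash using the standard
    // generated-protobuf prime scheme (multiply by 37 for the field number,
    // then 53 for the value) and memoizes the result, which is safe because
    // messages are immutable once built.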

    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
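
    // Usage sketch (illustrative, not part of the generated API surface):
    // all of the overloads above delegate to PARSER, so a typical round trip
    // looks like
    //
    //   byte[] bytes = constraint.toByteArray();
    //   SimplePlacementConstraintProto copy =
    //       SimplePlacementConstraintProto.parseFrom(bytes);
    //
    // where `constraint` is assumed to be an already-built instance.
    // parseFrom throws InvalidProtocolBufferException on malformed input and
    // also when the required scope field is absent.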

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.SimplePlacementConstraintProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.SimplePlacementConstraintProto)
        org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        scope_ = "";
        if (targetExpressionsBuilder_ == null) {
          targetExpressions_ = java.util.Collections.emptyList();
        } else {
          targetExpressions_ = null;
          targetExpressionsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        minCardinality_ = 0;
        maxCardinality_ = 0;
        attributeOpCode_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result) {
        if (targetExpressionsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            targetExpressions_ = java.util.Collections.unmodifiableList(targetExpressions_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.targetExpressions_ = targetExpressions_;
        } else {
          result.targetExpressions_ = targetExpressionsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.scope_ = scope_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.minCardinality_ = minCardinality_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.maxCardinality_ = maxCardinality_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.attributeOpCode_ = attributeOpCode_;
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }
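
      // Note the bit remapping above: builder bit 0x00000002 tracks
      // mutability of the repeated targetExpressions list, so the builder
      // bits for minCardinality (0x04), maxCardinality (0x08) and
      // attributeOpCode (0x10) each shift down one position to the message's
      // presence bits (0x02, 0x04, 0x08).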

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto.getDefaultInstance()) return this;
        if (other.hasScope()) {
          scope_ = other.scope_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (targetExpressionsBuilder_ == null) {
          if (!other.targetExpressions_.isEmpty()) {
            if (targetExpressions_.isEmpty()) {
              targetExpressions_ = other.targetExpressions_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureTargetExpressionsIsMutable();
              targetExpressions_.addAll(other.targetExpressions_);
            }
            onChanged();
          }
        } else {
          if (!other.targetExpressions_.isEmpty()) {
            if (targetExpressionsBuilder_.isEmpty()) {
              targetExpressionsBuilder_.dispose();
              targetExpressionsBuilder_ = null;
              targetExpressions_ = other.targetExpressions_;
              bitField0_ = (bitField0_ & ~0x00000002);
              targetExpressionsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getTargetExpressionsFieldBuilder() : null;
            } else {
              targetExpressionsBuilder_.addAllMessages(other.targetExpressions_);
            }
          }
        }
        if (other.hasMinCardinality()) {
          setMinCardinality(other.getMinCardinality());
        }
        if (other.hasMaxCardinality()) {
          setMaxCardinality(other.getMaxCardinality());
        }
        if (other.hasAttributeOpCode()) {
          setAttributeOpCode(other.getAttributeOpCode());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasScope()) {
          return false;
        }
        for (int i = 0; i < getTargetExpressionsCount(); i++) {
          if (!getTargetExpressions(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
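        // Each tag below encodes (field_number << 3) | wire_type: case 10 is
        // field 1 (scope, length-delimited), 18 is field 2 (a nested
        // targetExpressions message), 24 and 32 are fields 3 and 4 (varint
        // int32s), and 40 is field 5 (the attributeOpCode varint enum).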
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                scope_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.PARSER,
                        extensionRegistry);
                if (targetExpressionsBuilder_ == null) {
                  ensureTargetExpressionsIsMutable();
                  targetExpressions_.add(m);
                } else {
                  targetExpressionsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 24: {
                minCardinality_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                maxCardinality_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 40: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(5, tmpRaw);
                } else {
                  attributeOpCode_ = tmpRaw;
                  bitField0_ |= 0x00000010;
                }
                break;
              } // case 40
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object scope_ = "";
      /**
       * <code>required string scope = 1;</code>
       * @return Whether the scope field is set.
       */
      public boolean hasScope() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string scope = 1;</code>
       * @return The scope.
       */
      public java.lang.String getScope() {
        java.lang.Object ref = scope_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            scope_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string scope = 1;</code>
       * @return The bytes for scope.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getScopeBytes() {
        java.lang.Object ref = scope_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          scope_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string scope = 1;</code>
       * @param value The scope to set.
       * @return This builder for chaining.
       */
      public Builder setScope(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        scope_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string scope = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearScope() {
        scope_ = getDefaultInstance().getScope();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string scope = 1;</code>
       * @param value The bytes for scope to set.
       * @return This builder for chaining.
       */
      public Builder setScopeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        scope_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> targetExpressions_ =
        java.util.Collections.emptyList();
      private void ensureTargetExpressionsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          targetExpressions_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto>(targetExpressions_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> targetExpressionsBuilder_;
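
      // The builder keeps targetExpressions in one of two representations: a
      // plain ArrayList (targetExpressions_) until a sub-builder is first
      // requested, after which getTargetExpressionsFieldBuilder() migrates
      // the contents into targetExpressionsBuilder_. Every accessor below
      // branches on whichever representation is currently active.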

      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> getTargetExpressionsList() {
        if (targetExpressionsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(targetExpressions_);
        } else {
          return targetExpressionsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public int getTargetExpressionsCount() {
        if (targetExpressionsBuilder_ == null) {
          return targetExpressions_.size();
        } else {
          return targetExpressionsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getTargetExpressions(int index) {
        if (targetExpressionsBuilder_ == null) {
          return targetExpressions_.get(index);
        } else {
          return targetExpressionsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder setTargetExpressions(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) {
        if (targetExpressionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTargetExpressionsIsMutable();
          targetExpressions_.set(index, value);
          onChanged();
        } else {
          targetExpressionsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder setTargetExpressions(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) {
        if (targetExpressionsBuilder_ == null) {
          ensureTargetExpressionsIsMutable();
          targetExpressions_.set(index, builderForValue.build());
          onChanged();
        } else {
          targetExpressionsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder addTargetExpressions(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) {
        if (targetExpressionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTargetExpressionsIsMutable();
          targetExpressions_.add(value);
          onChanged();
        } else {
          targetExpressionsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder addTargetExpressions(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto value) {
        if (targetExpressionsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTargetExpressionsIsMutable();
          targetExpressions_.add(index, value);
          onChanged();
        } else {
          targetExpressionsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder addTargetExpressions(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) {
        if (targetExpressionsBuilder_ == null) {
          ensureTargetExpressionsIsMutable();
          targetExpressions_.add(builderForValue.build());
          onChanged();
        } else {
          targetExpressionsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder addTargetExpressions(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder builderForValue) {
        if (targetExpressionsBuilder_ == null) {
          ensureTargetExpressionsIsMutable();
          targetExpressions_.add(index, builderForValue.build());
          onChanged();
        } else {
          targetExpressionsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder addAllTargetExpressions(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto> values) {
        if (targetExpressionsBuilder_ == null) {
          ensureTargetExpressionsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, targetExpressions_);
          onChanged();
        } else {
          targetExpressionsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder clearTargetExpressions() {
        if (targetExpressionsBuilder_ == null) {
          targetExpressions_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          targetExpressionsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public Builder removeTargetExpressions(int index) {
        if (targetExpressionsBuilder_ == null) {
          ensureTargetExpressionsIsMutable();
          targetExpressions_.remove(index);
          onChanged();
        } else {
          targetExpressionsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder getTargetExpressionsBuilder(
          int index) {
        return getTargetExpressionsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder getTargetExpressionsOrBuilder(
          int index) {
        if (targetExpressionsBuilder_ == null) {
          return targetExpressions_.get(index);
        } else {
          return targetExpressionsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> 
           getTargetExpressionsOrBuilderList() {
        if (targetExpressionsBuilder_ != null) {
          return targetExpressionsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(targetExpressions_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder addTargetExpressionsBuilder() {
        return getTargetExpressionsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder addTargetExpressionsBuilder(
          int index) {
        return getTargetExpressionsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintTargetProto targetExpressions = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder> 
           getTargetExpressionsBuilderList() {
        return getTargetExpressionsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder> 
          getTargetExpressionsFieldBuilder() {
        if (targetExpressionsBuilder_ == null) {
          targetExpressionsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder>(
                  targetExpressions_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          targetExpressions_ = null;
        }
        return targetExpressionsBuilder_;
      }

      private int minCardinality_;
      /**
       * <code>optional int32 minCardinality = 3;</code>
       * @return Whether the minCardinality field is set.
       */
      @java.lang.Override
      public boolean hasMinCardinality() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 minCardinality = 3;</code>
       * @return The minCardinality.
       */
      @java.lang.Override
      public int getMinCardinality() {
        return minCardinality_;
      }
      /**
       * <code>optional int32 minCardinality = 3;</code>
       * @param value The minCardinality to set.
       * @return This builder for chaining.
       */
      public Builder setMinCardinality(int value) {
        minCardinality_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 minCardinality = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearMinCardinality() {
        bitField0_ = (bitField0_ & ~0x00000004);
        minCardinality_ = 0;
        onChanged();
        return this;
      }

      private int maxCardinality_;
      /**
       * <code>optional int32 maxCardinality = 4;</code>
       * @return Whether the maxCardinality field is set.
       */
      @java.lang.Override
      public boolean hasMaxCardinality() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 maxCardinality = 4;</code>
       * @return The maxCardinality.
       */
      @java.lang.Override
      public int getMaxCardinality() {
        return maxCardinality_;
      }
      /**
       * <code>optional int32 maxCardinality = 4;</code>
       * @param value The maxCardinality to set.
       * @return This builder for chaining.
       */
      public Builder setMaxCardinality(int value) {
        maxCardinality_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 maxCardinality = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxCardinality() {
        bitField0_ = (bitField0_ & ~0x00000008);
        maxCardinality_ = 0;
        onChanged();
        return this;
      }

      private int attributeOpCode_ = 1;
      /**
       * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
       * @return Whether the attributeOpCode field is set.
       */
      @java.lang.Override public boolean hasAttributeOpCode() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
       * @return The attributeOpCode.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto getAttributeOpCode() {
        org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto result = org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.forNumber(attributeOpCode_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto.NO_OP : result;
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
       * @param value The attributeOpCode to set.
       * @return This builder for chaining.
       */
      public Builder setAttributeOpCode(org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeOpCodeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000010;
        attributeOpCode_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.NodeAttributeOpCodeProto attributeOpCode = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearAttributeOpCode() {
        bitField0_ = (bitField0_ & ~0x00000010);
        attributeOpCode_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.SimplePlacementConstraintProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.SimplePlacementConstraintProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<SimplePlacementConstraintProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<SimplePlacementConstraintProto>() {
      @java.lang.Override
      public SimplePlacementConstraintProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<SimplePlacementConstraintProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<SimplePlacementConstraintProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
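
  // Builder usage sketch (illustrative only; the field values are made up,
  // and PlacementConstraintTargetProto with its TargetType enum is defined
  // just below):
  //
  //   SimplePlacementConstraintProto constraint =
  //       SimplePlacementConstraintProto.newBuilder()
  //           .setScope("node")
  //           .setMinCardinality(1)
  //           .setMaxCardinality(3)
  //           .addTargetExpressions(
  //               PlacementConstraintTargetProto.newBuilder()
  //                   .setTargetType(
  //                       PlacementConstraintTargetProto.TargetType.ALLOCATION_TAG)
  //                   .addTargetValues("hbase-rs"))
  //           .build();
  //
  // build() throws an UninitializedMessageException if the required scope
  // field is left unset; buildPartial() skips that check.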

  public interface PlacementConstraintTargetProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintTargetProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
     * @return Whether the targetType field is set.
     */
    boolean hasTargetType();
    /**
     * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
     * @return The targetType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType();

    /**
     * <code>optional string targetKey = 2;</code>
     * @return Whether the targetKey field is set.
     */
    boolean hasTargetKey();
    /**
     * <code>optional string targetKey = 2;</code>
     * @return The targetKey.
     */
    java.lang.String getTargetKey();
    /**
     * <code>optional string targetKey = 2;</code>
     * @return The bytes for targetKey.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetKeyBytes();

    /**
     * <code>repeated string targetValues = 3;</code>
     * @return A list containing the targetValues.
     */
    java.util.List<java.lang.String>
        getTargetValuesList();
    /**
     * <code>repeated string targetValues = 3;</code>
     * @return The count of targetValues.
     */
    int getTargetValuesCount();
    /**
     * <code>repeated string targetValues = 3;</code>
     * @param index The index of the element to return.
     * @return The targetValues at the given index.
     */
    java.lang.String getTargetValues(int index);
    /**
     * <code>repeated string targetValues = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the targetValues at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetValuesBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.PlacementConstraintTargetProto}
   */
  public static final class PlacementConstraintTargetProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintTargetProto)
      PlacementConstraintTargetProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PlacementConstraintTargetProto.newBuilder() to construct.
    private PlacementConstraintTargetProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PlacementConstraintTargetProto() {
      targetType_ = 1;
      targetKey_ = "";
      targetValues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PlacementConstraintTargetProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder.class);
    }

    /**
     * Protobuf enum {@code hadoop.yarn.PlacementConstraintTargetProto.TargetType}
     */
    public enum TargetType
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * <code>NODE_ATTRIBUTE = 1;</code>
       */
      NODE_ATTRIBUTE(1),
      /**
       * <code>ALLOCATION_TAG = 2;</code>
       */
      ALLOCATION_TAG(2),
      /**
       * <code>SELF = 3;</code>
       */
      SELF(3),
      ;

      /**
       * <code>NODE_ATTRIBUTE = 1;</code>
       */
      public static final int NODE_ATTRIBUTE_VALUE = 1;
      /**
       * <code>ALLOCATION_TAG = 2;</code>
       */
      public static final int ALLOCATION_TAG_VALUE = 2;
      /**
       * <code>SELF = 3;</code>
       */
      public static final int SELF_VALUE = 3;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static TargetType valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       */
      public static TargetType forNumber(int value) {
        switch (value) {
          case 1: return NODE_ATTRIBUTE;
          case 2: return ALLOCATION_TAG;
          case 3: return SELF;
          default: return null;
        }
      }

      public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<TargetType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
          TargetType> internalValueMap =
            new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<TargetType>() {
              public TargetType findValueByNumber(int number) {
                return TargetType.forNumber(number);
              }
            };

      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDescriptor().getEnumTypes().get(0);
      }

      private static final TargetType[] VALUES = values();

      public static TargetType valueOf(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int value;

      private TargetType(int value) {
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.yarn.PlacementConstraintTargetProto.TargetType)
    }
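
    // TargetType is a proto2 (closed) enum: forNumber() returns null for
    // unrecognized numbers, and the generated parsing code stores such
    // values in the message's unknown-field set rather than failing.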

    private int bitField0_;
    public static final int TARGETTYPE_FIELD_NUMBER = 1;
    private int targetType_ = 1;
    /**
     * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
     * @return Whether the targetType field is set.
     */
    @java.lang.Override public boolean hasTargetType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
     * @return The targetType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType() {
      org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType result = org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(targetType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.NODE_ATTRIBUTE : result;
    }

    public static final int TARGETKEY_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object targetKey_ = "";
    /**
     * <code>optional string targetKey = 2;</code>
     * @return Whether the targetKey field is set.
     */
    @java.lang.Override
    public boolean hasTargetKey() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string targetKey = 2;</code>
     * @return The targetKey.
     */
    @java.lang.Override
    public java.lang.String getTargetKey() {
      java.lang.Object ref = targetKey_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          targetKey_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string targetKey = 2;</code>
     * @return The bytes for targetKey.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetKeyBytes() {
      java.lang.Object ref = targetKey_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        targetKey_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TARGETVALUES_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList targetValues_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string targetValues = 3;</code>
     * @return A list containing the targetValues.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getTargetValuesList() {
      return targetValues_;
    }
    /**
     * <code>repeated string targetValues = 3;</code>
     * @return The count of targetValues.
     */
    public int getTargetValuesCount() {
      return targetValues_.size();
    }
    /**
     * <code>repeated string targetValues = 3;</code>
     * @param index The index of the element to return.
     * @return The targetValues at the given index.
     */
    public java.lang.String getTargetValues(int index) {
      return targetValues_.get(index);
    }
    /**
     * <code>repeated string targetValues = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the targetValues at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetValuesBytes(int index) {
      return targetValues_.getByteString(index);
    }

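    // Tri-state cache for isInitialized(): -1 = not yet computed,
    // 0 = a required field is missing, 1 = fully initialized.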
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasTargetType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, targetType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetKey_);
      }
      for (int i = 0; i < targetValues_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, targetValues_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, targetType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, targetKey_);
      }
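      // Repeated strings cost their length-delimited payload plus one tag
      // byte per element: field 3 with wire type 2 gives tag 26, which fits
      // in a single varint byte, hence the "1 *" factor below.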
      {
        int dataSize = 0;
        for (int i = 0; i < targetValues_.size(); i++) {
          dataSize += computeStringSizeNoTag(targetValues_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getTargetValuesList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto) obj;

      if (hasTargetType() != other.hasTargetType()) return false;
      if (hasTargetType()) {
        if (targetType_ != other.targetType_) return false;
      }
      if (hasTargetKey() != other.hasTargetKey()) return false;
      if (hasTargetKey()) {
        if (!getTargetKey()
            .equals(other.getTargetKey())) return false;
      }
      if (!getTargetValuesList()
          .equals(other.getTargetValuesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasTargetType()) {
        hash = (37 * hash) + TARGETTYPE_FIELD_NUMBER;
        hash = (53 * hash) + targetType_;
      }
      if (hasTargetKey()) {
        hash = (37 * hash) + TARGETKEY_FIELD_NUMBER;
        hash = (53 * hash) + getTargetKey().hashCode();
      }
      if (getTargetValuesCount() > 0) {
        hash = (37 * hash) + TARGETVALUES_FIELD_NUMBER;
        hash = (53 * hash) + getTargetValuesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
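    // Hedged usage sketch (illustrative only; every method used is declared
    // in this file, and toByteArray() comes from the protobuf MessageLite
    // base class): build, serialize, and re-parse a message.
    //
    //   PlacementConstraintTargetProto msg =
    //       PlacementConstraintTargetProto.newBuilder()
    //           .setTargetType(TargetType.NODE_ATTRIBUTE)
    //           .setTargetKey("hostname")         // optional string, field 2
    //           .addTargetValues("node-1")        // repeated string, field 3
    //           .build();                         // throws if targetType unset
    //   byte[] bytes = msg.toByteArray();
    //   PlacementConstraintTargetProto parsed =
    //       PlacementConstraintTargetProto.parseFrom(bytes);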
    /**
     * Protobuf type {@code hadoop.yarn.PlacementConstraintTargetProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintTargetProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        targetType_ = 1;
        targetKey_ = "";
        targetValues_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.targetType_ = targetType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.targetKey_ = targetKey_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          targetValues_.makeImmutable();
          result.targetValues_ = targetValues_;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.getDefaultInstance()) return this;
        if (other.hasTargetType()) {
          setTargetType(other.getTargetType());
        }
        if (other.hasTargetKey()) {
          targetKey_ = other.targetKey_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (!other.targetValues_.isEmpty()) {
          if (targetValues_.isEmpty()) {
            targetValues_ = other.targetValues_;
            bitField0_ |= 0x00000004;
          } else {
            ensureTargetValuesIsMutable();
            targetValues_.addAll(other.targetValues_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasTargetType()) {
          return false;
        }
        return true;
      }

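      // Streaming parse: each tag is (field_number << 3) | wire_type, so
      // field 1 as a varint enum -> 8, field 2 as a length-delimited string
      // -> 18, field 3 -> 26; tag 0 marks end of input.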
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  targetType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                targetKey_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureTargetValuesIsMutable();
                targetValues_.add(bs);
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int targetType_ = 1;
      /**
       * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
       * @return Whether the targetType field is set.
       */
      @java.lang.Override public boolean hasTargetType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
       * @return The targetType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType getTargetType() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType result = org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.forNumber(targetType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType.NODE_ATTRIBUTE : result;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
       * @param value The targetType to set.
       * @return This builder for chaining.
       */
      public Builder setTargetType(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto.TargetType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        targetType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintTargetProto.TargetType targetType = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        targetType_ = 1;
        onChanged();
        return this;
      }

      private java.lang.Object targetKey_ = "";
      /**
       * <code>optional string targetKey = 2;</code>
       * @return Whether the targetKey field is set.
       */
      public boolean hasTargetKey() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string targetKey = 2;</code>
       * @return The targetKey.
       */
      public java.lang.String getTargetKey() {
        java.lang.Object ref = targetKey_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            targetKey_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string targetKey = 2;</code>
       * @return The bytes for targetKey.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTargetKeyBytes() {
        java.lang.Object ref = targetKey_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          targetKey_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string targetKey = 2;</code>
       * @param value The targetKey to set.
       * @return This builder for chaining.
       */
      public Builder setTargetKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        targetKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string targetKey = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetKey() {
        targetKey_ = getDefaultInstance().getTargetKey();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string targetKey = 2;</code>
       * @param value The bytes for targetKey to set.
       * @return This builder for chaining.
       */
      public Builder setTargetKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        targetKey_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList targetValues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
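      // Copy-on-write: mergeFrom(other) may adopt other's immutable list
      // directly; ensureTargetValuesIsMutable() copies it before the first
      // local mutation and marks the field's presence bit.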
      private void ensureTargetValuesIsMutable() {
        if (!targetValues_.isModifiable()) {
          targetValues_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(targetValues_);
        }
        bitField0_ |= 0x00000004;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @return A list containing the targetValues.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getTargetValuesList() {
        targetValues_.makeImmutable();
        return targetValues_;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @return The count of targetValues.
       */
      public int getTargetValuesCount() {
        return targetValues_.size();
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param index The index of the element to return.
       * @return The targetValues at the given index.
       */
      public java.lang.String getTargetValues(int index) {
        return targetValues_.get(index);
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param index The index of the value to return.
       * @return The bytes of the targetValues at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTargetValuesBytes(int index) {
        return targetValues_.getByteString(index);
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param index The index to set the value at.
       * @param value The targetValues to set.
       * @return This builder for chaining.
       */
      public Builder setTargetValues(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTargetValuesIsMutable();
        targetValues_.set(index, value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param value The targetValues to add.
       * @return This builder for chaining.
       */
      public Builder addTargetValues(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTargetValuesIsMutable();
        targetValues_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param values The targetValues to add.
       * @return This builder for chaining.
       */
      public Builder addAllTargetValues(
          java.lang.Iterable<java.lang.String> values) {
        ensureTargetValuesIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, targetValues_);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetValues() {
        targetValues_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string targetValues = 3;</code>
       * @param value The bytes of the targetValues to add.
       * @return This builder for chaining.
       */
      public Builder addTargetValuesBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureTargetValuesIsMutable();
        targetValues_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintTargetProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintTargetProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

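    // Direct use of the PARSER field is deprecated in generated code; prefer
    // parser() or the static parseFrom(...) overloads above.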
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintTargetProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PlacementConstraintTargetProto>() {
      @java.lang.Override
      public PlacementConstraintTargetProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintTargetProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintTargetProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintTargetProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface TimedPlacementConstraintProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.TimedPlacementConstraintProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     * @return Whether the placementConstraint field is set.
     */
    boolean hasPlacementConstraint();
    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     * @return The placementConstraint.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint();
    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder();

    /**
     * <code>required int64 schedulingDelay = 2;</code>
     * @return Whether the schedulingDelay field is set.
     */
    boolean hasSchedulingDelay();
    /**
     * <code>required int64 schedulingDelay = 2;</code>
     * @return The schedulingDelay.
     */
    long getSchedulingDelay();

    /**
     * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
     * @return Whether the delayUnit field is set.
     */
    boolean hasDelayUnit();
    /**
     * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
     * @return The delayUnit.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit();
  }
  /**
   * Protobuf type {@code hadoop.yarn.TimedPlacementConstraintProto}
   */
  public static final class TimedPlacementConstraintProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.TimedPlacementConstraintProto)
      TimedPlacementConstraintProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use TimedPlacementConstraintProto.newBuilder() to construct.
    private TimedPlacementConstraintProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private TimedPlacementConstraintProto() {
      delayUnit_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new TimedPlacementConstraintProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder.class);
    }

    /**
     * Protobuf enum {@code hadoop.yarn.TimedPlacementConstraintProto.DelayUnit}
     */
    public enum DelayUnit
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * <code>MILLISECONDS = 1;</code>
       */
      MILLISECONDS(1),
      /**
       * <code>OPPORTUNITIES = 2;</code>
       */
      OPPORTUNITIES(2),
      ;

      /**
       * <code>MILLISECONDS = 1;</code>
       */
      public static final int MILLISECONDS_VALUE = 1;
      /**
       * <code>OPPORTUNITIES = 2;</code>
       */
      public static final int OPPORTUNITIES_VALUE = 2;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static DelayUnit valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       */
      public static DelayUnit forNumber(int value) {
        switch (value) {
          case 1: return MILLISECONDS;
          case 2: return OPPORTUNITIES;
          default: return null;
        }
      }

      public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<DelayUnit>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
          DelayUnit> internalValueMap =
            new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<DelayUnit>() {
              public DelayUnit findValueByNumber(int number) {
                return DelayUnit.forNumber(number);
              }
            };

      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDescriptor().getEnumTypes().get(0);
      }

      private static final DelayUnit[] VALUES = values();

      public static DelayUnit valueOf(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int value;

      private DelayUnit(int value) {
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.yarn.TimedPlacementConstraintProto.DelayUnit)
    }

    private int bitField0_;
    public static final int PLACEMENTCONSTRAINT_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     * @return Whether the placementConstraint field is set.
     */
    @java.lang.Override
    public boolean hasPlacementConstraint() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     * @return The placementConstraint.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }
    /**
     * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }

    public static final int SCHEDULINGDELAY_FIELD_NUMBER = 2;
    private long schedulingDelay_ = 0L;
    /**
     * <code>required int64 schedulingDelay = 2;</code>
     * @return Whether the schedulingDelay field is set.
     */
    @java.lang.Override
    public boolean hasSchedulingDelay() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required int64 schedulingDelay = 2;</code>
     * @return The schedulingDelay.
     */
    @java.lang.Override
    public long getSchedulingDelay() {
      return schedulingDelay_;
    }

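    // Stored as the raw wire number; 1 = MILLISECONDS, mirroring the
    // [default = MILLISECONDS] option, so getDelayUnit() also falls back to
    // MILLISECONDS for unset or unrecognized values.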
    public static final int DELAYUNIT_FIELD_NUMBER = 3;
    private int delayUnit_ = 1;
    /**
     * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
     * @return Whether the delayUnit field is set.
     */
    @java.lang.Override public boolean hasDelayUnit() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
     * @return The delayUnit.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit() {
      org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit result = org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(delayUnit_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.MILLISECONDS : result;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasPlacementConstraint()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasSchedulingDelay()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getPlacementConstraint().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getPlacementConstraint());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, schedulingDelay_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeEnum(3, delayUnit_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getPlacementConstraint());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, schedulingDelay_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(3, delayUnit_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto other = (org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto) obj;

      if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false;
      if (hasPlacementConstraint()) {
        if (!getPlacementConstraint()
            .equals(other.getPlacementConstraint())) return false;
      }
      if (hasSchedulingDelay() != other.hasSchedulingDelay()) return false;
      if (hasSchedulingDelay()) {
        if (getSchedulingDelay()
            != other.getSchedulingDelay()) return false;
      }
      if (hasDelayUnit() != other.hasDelayUnit()) return false;
      if (hasDelayUnit()) {
        if (delayUnit_ != other.delayUnit_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasPlacementConstraint()) {
        hash = (37 * hash) + PLACEMENTCONSTRAINT_FIELD_NUMBER;
        hash = (53 * hash) + getPlacementConstraint().hashCode();
      }
      if (hasSchedulingDelay()) {
        hash = (37 * hash) + SCHEDULINGDELAY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSchedulingDelay());
      }
      if (hasDelayUnit()) {
        hash = (37 * hash) + DELAYUNIT_FIELD_NUMBER;
        hash = (53 * hash) + delayUnit_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
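    // Hedged usage sketch (illustrative only): a TimedPlacementConstraintProto
    // pairs a PlacementConstraintProto with a scheduling delay. Both required
    // fields must be set, and the nested constraint must itself be
    // initialized, before build() succeeds.
    //
    //   TimedPlacementConstraintProto timed =
    //       TimedPlacementConstraintProto.newBuilder()
    //           .setPlacementConstraint(someConstraint) // required, field 1
    //           .setSchedulingDelay(30000L)             // required int64, field 2
    //           .setDelayUnit(DelayUnit.MILLISECONDS)   // optional; defaults to MILLISECONDS
    //           .build();
    //
    // where someConstraint is a placeholder for an already-built
    // PlacementConstraintProto.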
    /**
     * Protobuf type {@code hadoop.yarn.TimedPlacementConstraintProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.TimedPlacementConstraintProto)
        org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getPlacementConstraintFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        schedulingDelay_ = 0L;
        delayUnit_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.placementConstraint_ = placementConstraintBuilder_ == null
              ? placementConstraint_
              : placementConstraintBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.schedulingDelay_ = schedulingDelay_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.delayUnit_ = delayUnit_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance()) return this;
        if (other.hasPlacementConstraint()) {
          mergePlacementConstraint(other.getPlacementConstraint());
        }
        if (other.hasSchedulingDelay()) {
          setSchedulingDelay(other.getSchedulingDelay());
        }
        if (other.hasDelayUnit()) {
          setDelayUnit(other.getDelayUnit());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasPlacementConstraint()) {
          return false;
        }
        if (!hasSchedulingDelay()) {
          return false;
        }
        if (!getPlacementConstraint().isInitialized()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getPlacementConstraintFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                schedulingDelay_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(3, tmpRaw);
                } else {
                  delayUnit_ = tmpRaw;
                  bitField0_ |= 0x00000004;
                }
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

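      // The nested message field is held either as a plain message
      // (placementConstraint_) or behind a SingleFieldBuilderV3
      // (placementConstraintBuilder_), never both at once; the
      // alwaysUseFieldBuilders flag checked above forces the builder form
      // (the protobuf runtime enables it in its own tests).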
      private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_;
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       * @return Whether the placementConstraint field is set.
       */
      public boolean hasPlacementConstraint() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       * @return The placementConstraint.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
        if (placementConstraintBuilder_ == null) {
          return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        } else {
          return placementConstraintBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public Builder setPlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          placementConstraint_ = value;
        } else {
          placementConstraintBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public Builder setPlacementConstraint(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (placementConstraintBuilder_ == null) {
          placementConstraint_ = builderForValue.build();
        } else {
          placementConstraintBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public Builder mergePlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            placementConstraint_ != null &&
            placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) {
            getPlacementConstraintBuilder().mergeFrom(value);
          } else {
            placementConstraint_ = value;
          }
        } else {
          placementConstraintBuilder_.mergeFrom(value);
        }
        if (placementConstraint_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public Builder clearPlacementConstraint() {
        bitField0_ = (bitField0_ & ~0x00000001);
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getPlacementConstraintFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
        if (placementConstraintBuilder_ != null) {
          return placementConstraintBuilder_.getMessageOrBuilder();
        } else {
          return placementConstraint_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        }
      }
      /**
       * <code>required .hadoop.yarn.PlacementConstraintProto placementConstraint = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
          getPlacementConstraintFieldBuilder() {
        if (placementConstraintBuilder_ == null) {
          placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>(
                  getPlacementConstraint(),
                  getParentForChildren(),
                  isClean());
          placementConstraint_ = null;
        }
        return placementConstraintBuilder_;
      }

      private long schedulingDelay_;
      /**
       * <code>required int64 schedulingDelay = 2;</code>
       * @return Whether the schedulingDelay field is set.
       */
      @java.lang.Override
      public boolean hasSchedulingDelay() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required int64 schedulingDelay = 2;</code>
       * @return The schedulingDelay.
       */
      @java.lang.Override
      public long getSchedulingDelay() {
        return schedulingDelay_;
      }
      /**
       * <code>required int64 schedulingDelay = 2;</code>
       * @param value The schedulingDelay to set.
       * @return This builder for chaining.
       */
      public Builder setSchedulingDelay(long value) {
        schedulingDelay_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required int64 schedulingDelay = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearSchedulingDelay() {
        bitField0_ = (bitField0_ & ~0x00000002);
        schedulingDelay_ = 0L;
        onChanged();
        return this;
      }

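      // Stored as the raw wire number; 1 is DelayUnit.MILLISECONDS, matching
      // the declared [default = MILLISECONDS].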
      private int delayUnit_ = 1;
      /**
       * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
       * @return Whether the delayUnit field is set.
       */
      @java.lang.Override public boolean hasDelayUnit() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
       * @return The delayUnit.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit getDelayUnit() {
        org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit result = org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.forNumber(delayUnit_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit.MILLISECONDS : result;
      }
      /**
       * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
       * @param value The delayUnit to set.
       * @return This builder for chaining.
       */
      public Builder setDelayUnit(org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.DelayUnit value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        delayUnit_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.TimedPlacementConstraintProto.DelayUnit delayUnit = 3 [default = MILLISECONDS];</code>
       * @return This builder for chaining.
       */
      public Builder clearDelayUnit() {
        bitField0_ = (bitField0_ & ~0x00000004);
        delayUnit_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.TimedPlacementConstraintProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.TimedPlacementConstraintProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<TimedPlacementConstraintProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<TimedPlacementConstraintProto>() {
      @java.lang.Override
      public TimedPlacementConstraintProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<TimedPlacementConstraintProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<TimedPlacementConstraintProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
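  /*
   * A minimal usage sketch for the generated API above, assuming "constraint"
   * is a fully initialized PlacementConstraintProto obtained elsewhere (both
   * required fields must be set, or build() throws):
   *
   *   TimedPlacementConstraintProto timed = TimedPlacementConstraintProto.newBuilder()
   *       .setPlacementConstraint(constraint)                                  // required field 1
   *       .setSchedulingDelay(1000L)                                           // required field 2
   *       .setDelayUnit(TimedPlacementConstraintProto.DelayUnit.MILLISECONDS)  // optional; this is the default
   *       .build();
   *   byte[] bytes = timed.toByteArray();
   *   TimedPlacementConstraintProto roundTrip = TimedPlacementConstraintProto.parseFrom(bytes);
   */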

  public interface CompositePlacementConstraintProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.CompositePlacementConstraintProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
     * @return Whether the compositeType field is set.
     */
    boolean hasCompositeType();
    /**
     * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
     * @return The compositeType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType();

    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> 
        getChildConstraintsList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index);
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    int getChildConstraintsCount();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
        getChildConstraintsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> 
        getTimedChildConstraintsList();
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index);
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    int getTimedChildConstraintsCount();
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> 
        getTimedChildConstraintsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder(
        int index);
  }
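  /*
   * A minimal read-side sketch against this interface, assuming "proto" is any
   * CompositePlacementConstraintProtoOrBuilder: the count/index accessors walk
   * the repeated field without materializing a new list.
   *
   *   for (int i = 0; i < proto.getChildConstraintsCount(); i++) {
   *     org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder c =
   *         proto.getChildConstraintsOrBuilder(i);
   *     // ... inspect c ...
   *   }
   */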
  /**
   * Protobuf type {@code hadoop.yarn.CompositePlacementConstraintProto}
   */
  public static final class CompositePlacementConstraintProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.CompositePlacementConstraintProto)
      CompositePlacementConstraintProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use CompositePlacementConstraintProto.newBuilder() to construct.
    private CompositePlacementConstraintProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private CompositePlacementConstraintProto() {
      compositeType_ = 1;
      childConstraints_ = java.util.Collections.emptyList();
      timedChildConstraints_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new CompositePlacementConstraintProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder.class);
    }

    /**
     * Protobuf enum {@code hadoop.yarn.CompositePlacementConstraintProto.CompositeType}
     */
    public enum CompositeType
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * <pre>
       * All children constraints have to be satisfied.
       * </pre>
       *
       * <code>AND = 1;</code>
       */
      AND(1),
      /**
       * <pre>
       * One of the children constraints has to be satisfied.
       * </pre>
       *
       * <code>OR = 2;</code>
       */
      OR(2),
      /**
       * <pre>
       * Attempt to satisfy the first child constraint for delays[0] units (e.g.,
       * milliseconds or heartbeats). If this fails, try to satisfy the second child
       * constraint for delays[1] units and so on.
       * </pre>
       *
       * <code>DELAYED_OR = 3;</code>
       */
      DELAYED_OR(3),
      ;

      /**
       * <pre>
       * All children constraints have to be satisfied.
       * </pre>
       *
       * <code>AND = 1;</code>
       */
      public static final int AND_VALUE = 1;
      /**
       * <pre>
       * One of the children constraints has to be satisfied.
       * </pre>
       *
       * <code>OR = 2;</code>
       */
      public static final int OR_VALUE = 2;
      /**
       * <pre>
       * Attempt to satisfy the first child constraint for delays[0] units (e.g.,
       * milliseconds or heartbeats). If this fails, try to satisfy the second child
       * constraint for delays[1] units and so on.
       * </pre>
       *
       * <code>DELAYED_OR = 3;</code>
       */
      public static final int DELAYED_OR_VALUE = 3;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static CompositeType valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       */
      public static CompositeType forNumber(int value) {
        switch (value) {
          case 1: return AND;
          case 2: return OR;
          case 3: return DELAYED_OR;
          default: return null;
        }
      }

      public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<CompositeType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
          CompositeType> internalValueMap =
            new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<CompositeType>() {
              public CompositeType findValueByNumber(int number) {
                return CompositeType.forNumber(number);
              }
            };

      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDescriptor().getEnumTypes().get(0);
      }

      private static final CompositeType[] VALUES = values();

      public static CompositeType valueOf(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int value;

      private CompositeType(int value) {
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.yarn.CompositePlacementConstraintProto.CompositeType)
    }
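    /*
     * A minimal lookup sketch: forNumber returns the matching constant or null
     * for an unrecognized wire value (numbering starts at 1; there is no zero
     * value in this enum).
     *
     *   CompositeType t = CompositeType.forNumber(3);   // DELAYED_OR
     *   CompositeType u = CompositeType.forNumber(99);  // null: unknown value
     */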

    private int bitField0_;
    public static final int COMPOSITETYPE_FIELD_NUMBER = 1;
    private int compositeType_ = 1;
    /**
     * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
     * @return Whether the compositeType field is set.
     */
    @java.lang.Override public boolean hasCompositeType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
     * @return The compositeType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType() {
      org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType result = org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(compositeType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.AND : result;
    }

    public static final int CHILDCONSTRAINTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> childConstraints_;
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> getChildConstraintsList() {
      return childConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
        getChildConstraintsOrBuilderList() {
      return childConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    @java.lang.Override
    public int getChildConstraintsCount() {
      return childConstraints_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index) {
      return childConstraints_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder(
        int index) {
      return childConstraints_.get(index);
    }

    public static final int TIMEDCHILDCONSTRAINTS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> timedChildConstraints_;
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> getTimedChildConstraintsList() {
      return timedChildConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> 
        getTimedChildConstraintsOrBuilderList() {
      return timedChildConstraints_;
    }
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    @java.lang.Override
    public int getTimedChildConstraintsCount() {
      return timedChildConstraints_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index) {
      return timedChildConstraints_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder(
        int index) {
      return timedChildConstraints_.get(index);
    }

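    // Memoized tri-state: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized (see isInitialized() below).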
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasCompositeType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getChildConstraintsCount(); i++) {
        if (!getChildConstraints(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getTimedChildConstraintsCount(); i++) {
        if (!getTimedChildConstraints(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, compositeType_);
      }
      for (int i = 0; i < childConstraints_.size(); i++) {
        output.writeMessage(2, childConstraints_.get(i));
      }
      for (int i = 0; i < timedChildConstraints_.size(); i++) {
        output.writeMessage(3, timedChildConstraints_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, compositeType_);
      }
      for (int i = 0; i < childConstraints_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, childConstraints_.get(i));
      }
      for (int i = 0; i < timedChildConstraints_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, timedChildConstraints_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto other = (org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto) obj;

      if (hasCompositeType() != other.hasCompositeType()) return false;
      if (hasCompositeType()) {
        if (compositeType_ != other.compositeType_) return false;
      }
      if (!getChildConstraintsList()
          .equals(other.getChildConstraintsList())) return false;
      if (!getTimedChildConstraintsList()
          .equals(other.getTimedChildConstraintsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasCompositeType()) {
        hash = (37 * hash) + COMPOSITETYPE_FIELD_NUMBER;
        hash = (53 * hash) + compositeType_;
      }
      if (getChildConstraintsCount() > 0) {
        hash = (37 * hash) + CHILDCONSTRAINTS_FIELD_NUMBER;
        hash = (53 * hash) + getChildConstraintsList().hashCode();
      }
      if (getTimedChildConstraintsCount() > 0) {
        hash = (37 * hash) + TIMEDCHILDCONSTRAINTS_FIELD_NUMBER;
        hash = (53 * hash) + getTimedChildConstraintsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.CompositePlacementConstraintProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.CompositePlacementConstraintProto)
        org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        compositeType_ = 1;
        if (childConstraintsBuilder_ == null) {
          childConstraints_ = java.util.Collections.emptyList();
        } else {
          childConstraints_ = null;
          childConstraintsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (timedChildConstraintsBuilder_ == null) {
          timedChildConstraints_ = java.util.Collections.emptyList();
        } else {
          timedChildConstraints_ = null;
          timedChildConstraintsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result = new org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result) {
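        // A list managed without a field builder is frozen in place (wrapped
        // unmodifiable) and handed to the message; clearing the bit forces the
        // next mutation on this builder to make a fresh copy first.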
        if (childConstraintsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            childConstraints_ = java.util.Collections.unmodifiableList(childConstraints_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.childConstraints_ = childConstraints_;
        } else {
          result.childConstraints_ = childConstraintsBuilder_.build();
        }
        if (timedChildConstraintsBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            timedChildConstraints_ = java.util.Collections.unmodifiableList(timedChildConstraints_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.timedChildConstraints_ = timedChildConstraints_;
        } else {
          result.timedChildConstraints_ = timedChildConstraintsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.compositeType_ = compositeType_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.getDefaultInstance()) return this;
        if (other.hasCompositeType()) {
          setCompositeType(other.getCompositeType());
        }
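        // Repeated fields merge by concatenation: the other message's children
        // are appended to any already present, never substituted. When this
        // builder's list is still empty, the other message's immutable list is
        // adopted by reference to avoid a copy.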
        if (childConstraintsBuilder_ == null) {
          if (!other.childConstraints_.isEmpty()) {
            if (childConstraints_.isEmpty()) {
              childConstraints_ = other.childConstraints_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureChildConstraintsIsMutable();
              childConstraints_.addAll(other.childConstraints_);
            }
            onChanged();
          }
        } else {
          if (!other.childConstraints_.isEmpty()) {
            if (childConstraintsBuilder_.isEmpty()) {
              childConstraintsBuilder_.dispose();
              childConstraintsBuilder_ = null;
              childConstraints_ = other.childConstraints_;
              bitField0_ = (bitField0_ & ~0x00000002);
              childConstraintsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getChildConstraintsFieldBuilder() : null;
            } else {
              childConstraintsBuilder_.addAllMessages(other.childConstraints_);
            }
          }
        }
        if (timedChildConstraintsBuilder_ == null) {
          if (!other.timedChildConstraints_.isEmpty()) {
            if (timedChildConstraints_.isEmpty()) {
              timedChildConstraints_ = other.timedChildConstraints_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureTimedChildConstraintsIsMutable();
              timedChildConstraints_.addAll(other.timedChildConstraints_);
            }
            onChanged();
          }
        } else {
          if (!other.timedChildConstraints_.isEmpty()) {
            if (timedChildConstraintsBuilder_.isEmpty()) {
              timedChildConstraintsBuilder_.dispose();
              timedChildConstraintsBuilder_ = null;
              timedChildConstraints_ = other.timedChildConstraints_;
              bitField0_ = (bitField0_ & ~0x00000004);
              timedChildConstraintsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getTimedChildConstraintsFieldBuilder() : null;
            } else {
              timedChildConstraintsBuilder_.addAllMessages(other.timedChildConstraints_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasCompositeType()) {
          return false;
        }
        for (int i = 0; i < getChildConstraintsCount(); i++) {
          if (!getChildConstraints(i).isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getTimedChildConstraintsCount(); i++) {
          if (!getTimedChildConstraints(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
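        // Wire-format note: field 1 (an enum varint) arrives as tag 8, field 2
        // (a length-delimited childConstraints message) as tag 18, and field 3
        // (a timedChildConstraints message) as tag 26.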
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  compositeType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.PARSER,
                        extensionRegistry);
                if (childConstraintsBuilder_ == null) {
                  ensureChildConstraintsIsMutable();
                  childConstraints_.add(m);
                } else {
                  childConstraintsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.PARSER,
                        extensionRegistry);
                if (timedChildConstraintsBuilder_ == null) {
                  ensureTimedChildConstraintsIsMutable();
                  timedChildConstraints_.add(m);
                } else {
                  timedChildConstraintsBuilder_.addMessage(m);
                }
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

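      // Stored as the raw wire number; 1 is CompositeType.AND, the first
      // declared value.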
      private int compositeType_ = 1;
      /**
       * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
       * @return Whether the compositeType field is set.
       */
      @java.lang.Override public boolean hasCompositeType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
       * @return The compositeType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType getCompositeType() {
        org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType result = org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.forNumber(compositeType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType.AND : result;
      }
      /**
       * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
       * @param value The compositeType to set.
       * @return This builder for chaining.
       */
      public Builder setCompositeType(org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto.CompositeType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        compositeType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.CompositePlacementConstraintProto.CompositeType compositeType = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearCompositeType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        compositeType_ = 1;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> childConstraints_ =
        java.util.Collections.emptyList();
      private void ensureChildConstraintsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          childConstraints_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto>(childConstraints_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> childConstraintsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> getChildConstraintsList() {
        if (childConstraintsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(childConstraints_);
        } else {
          return childConstraintsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public int getChildConstraintsCount() {
        if (childConstraintsBuilder_ == null) {
          return childConstraints_.size();
        } else {
          return childConstraintsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getChildConstraints(int index) {
        if (childConstraintsBuilder_ == null) {
          return childConstraints_.get(index);
        } else {
          return childConstraintsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder setChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (childConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildConstraintsIsMutable();
          childConstraints_.set(index, value);
          onChanged();
        } else {
          childConstraintsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder setChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (childConstraintsBuilder_ == null) {
          ensureChildConstraintsIsMutable();
          childConstraints_.set(index, builderForValue.build());
          onChanged();
        } else {
          childConstraintsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder addChildConstraints(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (childConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildConstraintsIsMutable();
          childConstraints_.add(value);
          onChanged();
        } else {
          childConstraintsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder addChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (childConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureChildConstraintsIsMutable();
          childConstraints_.add(index, value);
          onChanged();
        } else {
          childConstraintsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder addChildConstraints(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (childConstraintsBuilder_ == null) {
          ensureChildConstraintsIsMutable();
          childConstraints_.add(builderForValue.build());
          onChanged();
        } else {
          childConstraintsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder addChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (childConstraintsBuilder_ == null) {
          ensureChildConstraintsIsMutable();
          childConstraints_.add(index, builderForValue.build());
          onChanged();
        } else {
          childConstraintsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder addAllChildConstraints(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto> values) {
        if (childConstraintsBuilder_ == null) {
          ensureChildConstraintsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, childConstraints_);
          onChanged();
        } else {
          childConstraintsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder clearChildConstraints() {
        if (childConstraintsBuilder_ == null) {
          childConstraints_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          childConstraintsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public Builder removeChildConstraints(int index) {
        if (childConstraintsBuilder_ == null) {
          ensureChildConstraintsIsMutable();
          childConstraints_.remove(index);
          onChanged();
        } else {
          childConstraintsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getChildConstraintsBuilder(
          int index) {
        return getChildConstraintsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getChildConstraintsOrBuilder(
          int index) {
        if (childConstraintsBuilder_ == null) {
          return childConstraints_.get(index);
        } else {
          return childConstraintsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
           getChildConstraintsOrBuilderList() {
        if (childConstraintsBuilder_ != null) {
          return childConstraintsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(childConstraints_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder addChildConstraintsBuilder() {
        return getChildConstraintsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder addChildConstraintsBuilder(
          int index) {
        return getChildConstraintsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.PlacementConstraintProto childConstraints = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder> 
           getChildConstraintsBuilderList() {
        return getChildConstraintsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
          getChildConstraintsFieldBuilder() {
        if (childConstraintsBuilder_ == null) {
          childConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>(
                  childConstraints_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          childConstraints_ = null;
        }
        return childConstraintsBuilder_;
      }
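
      // Note (editorial, not generated): once the field builder above is
      // created, it takes ownership of the backing list -- childConstraints_
      // is nulled and every accessor in this Builder routes through
      // childConstraintsBuilder_. The (bitField0_ & 0x00000002) flag tells
      // RepeatedFieldBuilderV3 whether the list handed over is already a
      // private mutable copy or is still shared with a built message.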

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> timedChildConstraints_ =
        java.util.Collections.emptyList();
      private void ensureTimedChildConstraintsIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          timedChildConstraints_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto>(timedChildConstraints_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> timedChildConstraintsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> getTimedChildConstraintsList() {
        if (timedChildConstraintsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(timedChildConstraints_);
        } else {
          return timedChildConstraintsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public int getTimedChildConstraintsCount() {
        if (timedChildConstraintsBuilder_ == null) {
          return timedChildConstraints_.size();
        } else {
          return timedChildConstraintsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto getTimedChildConstraints(int index) {
        if (timedChildConstraintsBuilder_ == null) {
          return timedChildConstraints_.get(index);
        } else {
          return timedChildConstraintsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder setTimedChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) {
        if (timedChildConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.set(index, value);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder setTimedChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) {
        if (timedChildConstraintsBuilder_ == null) {
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.set(index, builderForValue.build());
          onChanged();
        } else {
          timedChildConstraintsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder addTimedChildConstraints(org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) {
        if (timedChildConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.add(value);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder addTimedChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto value) {
        if (timedChildConstraintsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.add(index, value);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder addTimedChildConstraints(
          org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) {
        if (timedChildConstraintsBuilder_ == null) {
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.add(builderForValue.build());
          onChanged();
        } else {
          timedChildConstraintsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder addTimedChildConstraints(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder builderForValue) {
        if (timedChildConstraintsBuilder_ == null) {
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.add(index, builderForValue.build());
          onChanged();
        } else {
          timedChildConstraintsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder addAllTimedChildConstraints(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto> values) {
        if (timedChildConstraintsBuilder_ == null) {
          ensureTimedChildConstraintsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, timedChildConstraints_);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder clearTimedChildConstraints() {
        if (timedChildConstraintsBuilder_ == null) {
          timedChildConstraints_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public Builder removeTimedChildConstraints(int index) {
        if (timedChildConstraintsBuilder_ == null) {
          ensureTimedChildConstraintsIsMutable();
          timedChildConstraints_.remove(index);
          onChanged();
        } else {
          timedChildConstraintsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder getTimedChildConstraintsBuilder(
          int index) {
        return getTimedChildConstraintsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder getTimedChildConstraintsOrBuilder(
          int index) {
        if (timedChildConstraintsBuilder_ == null) {
          return timedChildConstraints_.get(index);
        } else {
          return timedChildConstraintsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> 
           getTimedChildConstraintsOrBuilderList() {
        if (timedChildConstraintsBuilder_ != null) {
          return timedChildConstraintsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(timedChildConstraints_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder addTimedChildConstraintsBuilder() {
        return getTimedChildConstraintsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder addTimedChildConstraintsBuilder(
          int index) {
        return getTimedChildConstraintsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.TimedPlacementConstraintProto timedChildConstraints = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder> 
           getTimedChildConstraintsBuilderList() {
        return getTimedChildConstraintsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder> 
          getTimedChildConstraintsFieldBuilder() {
        if (timedChildConstraintsBuilder_ == null) {
          timedChildConstraintsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProtoOrBuilder>(
                  timedChildConstraints_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          timedChildConstraints_ = null;
        }
        return timedChildConstraintsBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.CompositePlacementConstraintProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.CompositePlacementConstraintProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CompositePlacementConstraintProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CompositePlacementConstraintProto>() {
      @java.lang.Override
      public CompositePlacementConstraintProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<CompositePlacementConstraintProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<CompositePlacementConstraintProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CompositePlacementConstraintProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
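
  // Editorial usage sketch (not generated by protoc): exercises the
  // CompositePlacementConstraintProto builder defined above. `child` stands
  // in for a previously constructed constraint; buildPartial() is used
  // because build() would reject the message while the required
  // compositeType field (field 1, set earlier in this file) is unset.
  private static CompositePlacementConstraintProto exampleComposite(
      PlacementConstraintProto child) {
    return CompositePlacementConstraintProto.newBuilder()
        .addChildConstraints(child)   // repeated message field 2
        .buildPartial();              // skips the required-field check
  }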

  public interface PlacementConstraintMapEntryProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.PlacementConstraintMapEntryProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @return A list containing the allocationTags.
     */
    java.util.List<java.lang.String>
        getAllocationTagsList();
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @return The count of allocationTags.
     */
    int getAllocationTagsCount();
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    java.lang.String getAllocationTags(int index);
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index);

    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     * @return Whether the placementConstraint field is set.
     */
    boolean hasPlacementConstraint();
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     * @return The placementConstraint.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint();
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder();
  }
  /**
   * <pre>
   * This associates a set of allocation tags to a Placement Constraint.
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.PlacementConstraintMapEntryProto}
   */
  public static final class PlacementConstraintMapEntryProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.PlacementConstraintMapEntryProto)
      PlacementConstraintMapEntryProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use PlacementConstraintMapEntryProto.newBuilder() to construct.
    private PlacementConstraintMapEntryProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private PlacementConstraintMapEntryProto() {
      allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new PlacementConstraintMapEntryProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder.class);
    }

    private int bitField0_;
    public static final int ALLOCATION_TAGS_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @return A list containing the allocationTags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getAllocationTagsList() {
      return allocationTags_;
    }
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @return The count of allocationTags.
     */
    public int getAllocationTagsCount() {
      return allocationTags_.size();
    }
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @param index The index of the element to return.
     * @return The allocationTags at the given index.
     */
    public java.lang.String getAllocationTags(int index) {
      return allocationTags_.get(index);
    }
    /**
     * <code>repeated string allocation_tags = 1;</code>
     * @param index The index of the value to return.
     * @return The bytes of the allocationTags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getAllocationTagsBytes(int index) {
      return allocationTags_.getByteString(index);
    }

    public static final int PLACEMENT_CONSTRAINT_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     * @return Whether the placementConstraint field is set.
     */
    @java.lang.Override
    public boolean hasPlacementConstraint() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     * @return The placementConstraint.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }
    /**
     * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
      return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasPlacementConstraint()) {
        if (!getPlacementConstraint().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < allocationTags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, allocationTags_.getRaw(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(2, getPlacementConstraint());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < allocationTags_.size(); i++) {
          dataSize += computeStringSizeNoTag(allocationTags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getAllocationTagsList().size();
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getPlacementConstraint());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
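
    // Note (editorial): for the repeated string field above, dataSize sums the
    // length-delimited payloads (computeStringSizeNoTag includes each string's
    // varint length prefix), while the `1 * size()` term adds one tag byte per
    // element -- field 1 with wire type 2 encodes as the single byte 0x0A.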

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto other = (org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto) obj;

      if (!getAllocationTagsList()
          .equals(other.getAllocationTagsList())) return false;
      if (hasPlacementConstraint() != other.hasPlacementConstraint()) return false;
      if (hasPlacementConstraint()) {
        if (!getPlacementConstraint()
            .equals(other.getPlacementConstraint())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getAllocationTagsCount() > 0) {
        hash = (37 * hash) + ALLOCATION_TAGS_FIELD_NUMBER;
        hash = (53 * hash) + getAllocationTagsList().hashCode();
      }
      if (hasPlacementConstraint()) {
        hash = (37 * hash) + PLACEMENT_CONSTRAINT_FIELD_NUMBER;
        hash = (53 * hash) + getPlacementConstraint().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * This associates a set of allocation tags to a Placement Constraint.
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.PlacementConstraintMapEntryProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.PlacementConstraintMapEntryProto)
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.class, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getPlacementConstraintFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        allocationTags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto result) {
        int from_bitField0_ = bitField0_;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          allocationTags_.makeImmutable();
          result.allocationTags_ = allocationTags_;
        }
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.placementConstraint_ = placementConstraintBuilder_ == null
              ? placementConstraint_
              : placementConstraintBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto.getDefaultInstance()) return this;
        if (!other.allocationTags_.isEmpty()) {
          if (allocationTags_.isEmpty()) {
            allocationTags_ = other.allocationTags_;
            bitField0_ |= 0x00000001;
          } else {
            ensureAllocationTagsIsMutable();
            allocationTags_.addAll(other.allocationTags_);
          }
          onChanged();
        }
        if (other.hasPlacementConstraint()) {
          mergePlacementConstraint(other.getPlacementConstraint());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasPlacementConstraint()) {
          if (!getPlacementConstraint().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureAllocationTagsIsMutable();
                allocationTags_.add(bs);
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getPlacementConstraintFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
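
      // Note (editorial): the case labels in mergeFrom above are raw wire
      // tags, tag = (field_number << 3) | wire_type. 10 = field 1
      // (allocation_tags, length-delimited), 18 = field 2
      // (placement_constraint, length-delimited); tag 0 marks end of input.
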
      private int bitField0_;

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureAllocationTagsIsMutable() {
        if (!allocationTags_.isModifiable()) {
          allocationTags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(allocationTags_);
        }
        bitField0_ |= 0x00000001;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @return A list containing the allocationTags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getAllocationTagsList() {
        allocationTags_.makeImmutable();
        return allocationTags_;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @return The count of allocationTags.
       */
      public int getAllocationTagsCount() {
        return allocationTags_.size();
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param index The index of the element to return.
       * @return The allocationTags at the given index.
       */
      public java.lang.String getAllocationTags(int index) {
        return allocationTags_.get(index);
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param index The index of the value to return.
       * @return The bytes of the allocationTags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getAllocationTagsBytes(int index) {
        return allocationTags_.getByteString(index);
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param index The index to set the value at.
       * @param value The allocationTags to set.
       * @return This builder for chaining.
       */
      public Builder setAllocationTags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.set(index, value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param value The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param values The allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllAllocationTags(
          java.lang.Iterable<java.lang.String> values) {
        ensureAllocationTagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, allocationTags_);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearAllocationTags() {
        allocationTags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string allocation_tags = 1;</code>
       * @param value The bytes of the allocationTags to add.
       * @return This builder for chaining.
       */
      public Builder addAllocationTagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureAllocationTagsIsMutable();
        allocationTags_.add(value);
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
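
      // Note (editorial): the allocation_tags mutators above are
      // copy-on-write. A LazyStringArrayList taken from a built message (or
      // merged from another builder) is shared and immutable;
      // ensureAllocationTagsIsMutable() copies it into a fresh modifiable
      // list before the first in-place change and records ownership in
      // bitField0_.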

      private org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto placementConstraint_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> placementConstraintBuilder_;
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       * @return Whether the placementConstraint field is set.
       */
      public boolean hasPlacementConstraint() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       * @return The placementConstraint.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto getPlacementConstraint() {
        if (placementConstraintBuilder_ == null) {
          return placementConstraint_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        } else {
          return placementConstraintBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public Builder setPlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          placementConstraint_ = value;
        } else {
          placementConstraintBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public Builder setPlacementConstraint(
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder builderForValue) {
        if (placementConstraintBuilder_ == null) {
          placementConstraint_ = builderForValue.build();
        } else {
          placementConstraintBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public Builder mergePlacementConstraint(org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto value) {
        if (placementConstraintBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            placementConstraint_ != null &&
            placementConstraint_ != org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance()) {
            getPlacementConstraintBuilder().mergeFrom(value);
          } else {
            placementConstraint_ = value;
          }
        } else {
          placementConstraintBuilder_.mergeFrom(value);
        }
        if (placementConstraint_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public Builder clearPlacementConstraint() {
        bitField0_ = (bitField0_ & ~0x00000002);
        placementConstraint_ = null;
        if (placementConstraintBuilder_ != null) {
          placementConstraintBuilder_.dispose();
          placementConstraintBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder getPlacementConstraintBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getPlacementConstraintFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder getPlacementConstraintOrBuilder() {
        if (placementConstraintBuilder_ != null) {
          return placementConstraintBuilder_.getMessageOrBuilder();
        } else {
          return placementConstraint_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.getDefaultInstance() : placementConstraint_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PlacementConstraintProto placement_constraint = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder> 
          getPlacementConstraintFieldBuilder() {
        if (placementConstraintBuilder_ == null) {
          placementConstraintBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintProtoOrBuilder>(
                  getPlacementConstraint(),
                  getParentForChildren(),
                  isClean());
          placementConstraint_ = null;
        }
        return placementConstraintBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.PlacementConstraintMapEntryProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.PlacementConstraintMapEntryProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintMapEntryProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<PlacementConstraintMapEntryProto>() {
      @java.lang.Override
      public PlacementConstraintMapEntryProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintMapEntryProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<PlacementConstraintMapEntryProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PlacementConstraintMapEntryProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
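
  // Editorial usage sketch (not generated by protoc): builds a
  // PlacementConstraintMapEntryProto with the Builder above and round-trips
  // it through the wire format. "hbase-rs" is an arbitrary example tag, and
  // `pc` is assumed to be an initialized PlacementConstraintProto.
  private static PlacementConstraintMapEntryProto exampleEntryRoundTrip(
      PlacementConstraintProto pc)
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    PlacementConstraintMapEntryProto entry =
        PlacementConstraintMapEntryProto.newBuilder()
            .addAllocationTags("hbase-rs")   // repeated string field 1
            .setPlacementConstraint(pc)      // optional message field 2
            .build();
    // Serialize and parse back; the result compares equal via equals().
    return PlacementConstraintMapEntryProto.parseFrom(entry.toByteArray());
  }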

  public interface ReservationIdProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationIdProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int64 id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional int64 id = 1;</code>
     * @return The id.
     */
    long getId();

    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return Whether the clusterTimestamp field is set.
     */
    boolean hasClusterTimestamp();
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return The clusterTimestamp.
     */
    long getClusterTimestamp();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationIdProto}
   */
  public static final class ReservationIdProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationIdProto)
      ReservationIdProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationIdProto.newBuilder() to construct.
    private ReservationIdProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationIdProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationIdProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder.class);
    }

    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private long id_ = 0L;
    /**
     * <code>optional int64 id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public long getId() {
      return id_;
    }

    public static final int CLUSTER_TIMESTAMP_FIELD_NUMBER = 2;
    private long clusterTimestamp_ = 0L;
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return Whether the clusterTimestamp field is set.
     */
    @java.lang.Override
    public boolean hasClusterTimestamp() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 cluster_timestamp = 2;</code>
     * @return The clusterTimestamp.
     */
    @java.lang.Override
    public long getClusterTimestamp() {
      return clusterTimestamp_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, clusterTimestamp_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, clusterTimestamp_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
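
    // Note (editorial): computeInt64Size above is one tag byte (field numbers
    // 1 and 2 fit in a single-byte tag) plus the varint encoding of the value,
    // which takes 1-10 bytes; negative int64 values always occupy the full 10.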

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (getId()
            != other.getId()) return false;
      }
      if (hasClusterTimestamp() != other.hasClusterTimestamp()) return false;
      if (hasClusterTimestamp()) {
        if (getClusterTimestamp()
            != other.getClusterTimestamp()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getId());
      }
      if (hasClusterTimestamp()) {
        hash = (37 * hash) + CLUSTER_TIMESTAMP_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getClusterTimestamp());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
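
    // The hash above is memoized; 0 is the "not yet computed" sentinel, so in
    // the unlikely case that the real hash is 0 it is simply recomputed on
    // each call. Field numbers (37 multiplier) and field values (53
    // multiplier) are mixed into a seed derived from the descriptor.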

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationIdProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationIdProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = 0L;
        clusterTimestamp_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationIdProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = id_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.clusterTimestamp_ = clusterTimestamp_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }
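
      // buildPartial0 above copies only those fields whose presence bit is
      // set in the builder's bitField0_, then ORs the same bits into the
      // message's bitField0_ so the has*() accessors reflect what was set.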

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasClusterTimestamp()) {
          setClusterTimestamp(other.getClusterTimestamp());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                id_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                clusterTimestamp_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
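
      // Note on the case labels above: a protobuf wire tag is
      // (field_number << 3) | wire_type, so 8 is field 1 as a varint and
      // 16 is field 2 as a varint; readTag() returns 0 at end of input.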
      private int bitField0_;

      private long id_ ;
      /**
       * <code>optional int64 id = 1;</code>
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int64 id = 1;</code>
       * @return The id.
       */
      @java.lang.Override
      public long getId() {
        return id_;
      }
      /**
       * <code>optional int64 id = 1;</code>
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(long value) {
        id_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0L;
        onChanged();
        return this;
      }

      private long clusterTimestamp_ ;
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return Whether the clusterTimestamp field is set.
       */
      @java.lang.Override
      public boolean hasClusterTimestamp() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return The clusterTimestamp.
       */
      @java.lang.Override
      public long getClusterTimestamp() {
        return clusterTimestamp_;
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @param value The clusterTimestamp to set.
       * @return This builder for chaining.
       */
      public Builder setClusterTimestamp(long value) {
        clusterTimestamp_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 cluster_timestamp = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearClusterTimestamp() {
        bitField0_ = (bitField0_ & ~0x00000002);
        clusterTimestamp_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationIdProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationIdProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationIdProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationIdProto>() {
      @java.lang.Override
      public ReservationIdProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationIdProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationIdProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
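
  // Usage sketch (illustrative only; the values below are assumed examples,
  // not taken from YARN itself): round-tripping a ReservationIdProto through
  // its builder, the wire format, and the parser defined above.
  //
  //   ReservationIdProto rid = ReservationIdProto.newBuilder()
  //       .setId(42L)                          // example value
  //       .setClusterTimestamp(1700000000000L) // example value
  //       .build();
  //   byte[] bytes = rid.toByteArray();
  //   ReservationIdProto parsed = ReservationIdProto.parseFrom(bytes);
  //   assert parsed.hasId() && parsed.getId() == 42L;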

  public interface ReservationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * <code>optional int32 num_containers = 2 [default = 1];</code>
     * @return Whether the numContainers field is set.
     */
    boolean hasNumContainers();
    /**
     * <code>optional int32 num_containers = 2 [default = 1];</code>
     * @return The numContainers.
     */
    int getNumContainers();

    /**
     * <code>optional int32 concurrency = 3 [default = 1];</code>
     * @return Whether the concurrency field is set.
     */
    boolean hasConcurrency();
    /**
     * <code>optional int32 concurrency = 3 [default = 1];</code>
     * @return The concurrency.
     */
    int getConcurrency();

    /**
     * <code>optional int64 duration = 4 [default = -1];</code>
     * @return Whether the duration field is set.
     */
    boolean hasDuration();
    /**
     * <code>optional int64 duration = 4 [default = -1];</code>
     * @return The duration.
     */
    long getDuration();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationRequestProto}
   */
  public static final class ReservationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationRequestProto)
      ReservationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationRequestProto.newBuilder() to construct.
    private ReservationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationRequestProto() {
      numContainers_ = 1;
      concurrency_ = 1;
      duration_ = -1L;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationRequestProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int CAPABILITY_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     * @return Whether the capability field is set.
     */
    @java.lang.Override
    public boolean hasCapability() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     * @return The capability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }

    public static final int NUM_CONTAINERS_FIELD_NUMBER = 2;
    private int numContainers_ = 1;
    /**
     * <code>optional int32 num_containers = 2 [default = 1];</code>
     * @return Whether the numContainers field is set.
     */
    @java.lang.Override
    public boolean hasNumContainers() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 num_containers = 2 [default = 1];</code>
     * @return The numContainers.
     */
    @java.lang.Override
    public int getNumContainers() {
      return numContainers_;
    }

    public static final int CONCURRENCY_FIELD_NUMBER = 3;
    private int concurrency_ = 1;
    /**
     * <code>optional int32 concurrency = 3 [default = 1];</code>
     * @return Whether the concurrency field is set.
     */
    @java.lang.Override
    public boolean hasConcurrency() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 concurrency = 3 [default = 1];</code>
     * @return The concurrency.
     */
    @java.lang.Override
    public int getConcurrency() {
      return concurrency_;
    }

    public static final int DURATION_FIELD_NUMBER = 4;
    private long duration_ = -1L;
    /**
     * <code>optional int64 duration = 4 [default = -1];</code>
     * @return Whether the duration field is set.
     */
    @java.lang.Override
    public boolean hasDuration() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 duration = 4 [default = -1];</code>
     * @return The duration.
     */
    @java.lang.Override
    public long getDuration() {
      return duration_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasCapability()) {
        if (!getCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getCapability());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(2, numContainers_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(3, concurrency_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(4, duration_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getCapability());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(2, numContainers_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, concurrency_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(4, duration_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto) obj;

      if (hasCapability() != other.hasCapability()) return false;
      if (hasCapability()) {
        if (!getCapability()
            .equals(other.getCapability())) return false;
      }
      if (hasNumContainers() != other.hasNumContainers()) return false;
      if (hasNumContainers()) {
        if (getNumContainers()
            != other.getNumContainers()) return false;
      }
      if (hasConcurrency() != other.hasConcurrency()) return false;
      if (hasConcurrency()) {
        if (getConcurrency()
            != other.getConcurrency()) return false;
      }
      if (hasDuration() != other.hasDuration()) return false;
      if (hasDuration()) {
        if (getDuration()
            != other.getDuration()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasCapability()) {
        hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getCapability().hashCode();
      }
      if (hasNumContainers()) {
        hash = (37 * hash) + NUM_CONTAINERS_FIELD_NUMBER;
        hash = (53 * hash) + getNumContainers();
      }
      if (hasConcurrency()) {
        hash = (37 * hash) + CONCURRENCY_FIELD_NUMBER;
        hash = (53 * hash) + getConcurrency();
      }
      if (hasDuration()) {
        hash = (37 * hash) + DURATION_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getDuration());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getCapabilityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        numContainers_ = 1;
        concurrency_ = 1;
        duration_ = -1L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.capability_ = capabilityBuilder_ == null
              ? capability_
              : capabilityBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.numContainers_ = numContainers_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.concurrency_ = concurrency_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.duration_ = duration_;
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance()) return this;
        if (other.hasCapability()) {
          mergeCapability(other.getCapability());
        }
        if (other.hasNumContainers()) {
          setNumContainers(other.getNumContainers());
        }
        if (other.hasConcurrency()) {
          setConcurrency(other.getConcurrency());
        }
        if (other.hasDuration()) {
          setDuration(other.getDuration());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasCapability()) {
          if (!getCapability().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                numContainers_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                concurrency_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                duration_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
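
      // Note on case 10 above: tag 10 is field 1 with wire type 2
      // (length-delimited), i.e. the nested ResourceProto message, which is
      // parsed directly into the capability field builder.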
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       * @return Whether the capability field is set.
       */
      public boolean hasCapability() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       * @return The capability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
        if (capabilityBuilder_ == null) {
          return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        } else {
          return capabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          capability_ = value;
        } else {
          capabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public Builder setCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (capabilityBuilder_ == null) {
          capability_ = builderForValue.build();
        } else {
          capabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            capability_ != null &&
            capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getCapabilityBuilder().mergeFrom(value);
          } else {
            capability_ = value;
          }
        } else {
          capabilityBuilder_.mergeFrom(value);
        }
        if (capability_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public Builder clearCapability() {
        bitField0_ = (bitField0_ & ~0x00000001);
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
        if (capabilityBuilder_ != null) {
          return capabilityBuilder_.getMessageOrBuilder();
        } else {
          return capability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getCapabilityFieldBuilder() {
        if (capabilityBuilder_ == null) {
          capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getCapability(),
                  getParentForChildren(),
                  isClean());
          capability_ = null;
        }
        return capabilityBuilder_;
      }
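
      // The SingleFieldBuilderV3 above is created lazily on first access.
      // Once it exists it owns the capability message, so the plain
      // capability_ field is nulled out and all reads go through the builder.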

      private int numContainers_ = 1;
      /**
       * <code>optional int32 num_containers = 2 [default = 1];</code>
       * @return Whether the numContainers field is set.
       */
      @java.lang.Override
      public boolean hasNumContainers() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int32 num_containers = 2 [default = 1];</code>
       * @return The numContainers.
       */
      @java.lang.Override
      public int getNumContainers() {
        return numContainers_;
      }
      /**
       * <code>optional int32 num_containers = 2 [default = 1];</code>
       * @param value The numContainers to set.
       * @return This builder for chaining.
       */
      public Builder setNumContainers(int value) {
        numContainers_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 num_containers = 2 [default = 1];</code>
       * @return This builder for chaining.
       */
      public Builder clearNumContainers() {
        bitField0_ = (bitField0_ & ~0x00000002);
        numContainers_ = 1;
        onChanged();
        return this;
      }

      private int concurrency_ = 1;
      /**
       * <code>optional int32 concurrency = 3 [default = 1];</code>
       * @return Whether the concurrency field is set.
       */
      @java.lang.Override
      public boolean hasConcurrency() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 concurrency = 3 [default = 1];</code>
       * @return The concurrency.
       */
      @java.lang.Override
      public int getConcurrency() {
        return concurrency_;
      }
      /**
       * <code>optional int32 concurrency = 3 [default = 1];</code>
       * @param value The concurrency to set.
       * @return This builder for chaining.
       */
      public Builder setConcurrency(int value) {
        concurrency_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 concurrency = 3 [default = 1];</code>
       * @return This builder for chaining.
       */
      public Builder clearConcurrency() {
        bitField0_ = (bitField0_ & ~0x00000004);
        concurrency_ = 1;
        onChanged();
        return this;
      }

      private long duration_ = -1L;
      /**
       * <code>optional int64 duration = 4 [default = -1];</code>
       * @return Whether the duration field is set.
       */
      @java.lang.Override
      public boolean hasDuration() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 duration = 4 [default = -1];</code>
       * @return The duration.
       */
      @java.lang.Override
      public long getDuration() {
        return duration_;
      }
      /**
       * <code>optional int64 duration = 4 [default = -1];</code>
       * @param value The duration to set.
       * @return This builder for chaining.
       */
      public Builder setDuration(long value) {
        duration_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 duration = 4 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearDuration() {
        bitField0_ = (bitField0_ & ~0x00000008);
        duration_ = -1L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationRequestProto>() {
      @java.lang.Override
      public ReservationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
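
  // Usage sketch (illustrative only; setMemory/setVirtualCores are assumed
  // ResourceProto accessors and the values are made up): only capability
  // needs explicit population, since num_containers and concurrency default
  // to 1 and duration defaults to -1.
  //
  //   ReservationRequestProto req = ReservationRequestProto.newBuilder()
  //       .setCapability(ResourceProto.newBuilder()
  //           .setMemory(1024)      // assumed ResourceProto field
  //           .setVirtualCores(1)   // assumed ResourceProto field
  //           .build())
  //       .setNumContainers(2)
  //       .build();
  //   assert req.getConcurrency() == 1;  // default, hasConcurrency() is false
  //   assert req.getDuration() == -1L;   // default, hasDuration() is false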

  public interface ReservationRequestsProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationRequestsProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> 
        getReservationResourcesList();
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index);
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    int getReservationResourcesCount();
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> 
        getReservationResourcesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder getReservationResourcesOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
     * @return Whether the interpreter field is set.
     */
    boolean hasInterpreter();
    /**
     * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
     * @return The interpreter.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationRequestsProto}
   */
  public static final class ReservationRequestsProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationRequestsProto)
      ReservationRequestsProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationRequestsProto.newBuilder() to construct.
    private ReservationRequestsProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationRequestsProto() {
      reservationResources_ = java.util.Collections.emptyList();
      interpreter_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationRequestsProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESERVATION_RESOURCES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> reservationResources_;
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> getReservationResourcesList() {
      return reservationResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> 
        getReservationResourcesOrBuilderList() {
      return reservationResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    @java.lang.Override
    public int getReservationResourcesCount() {
      return reservationResources_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index) {
      return reservationResources_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder getReservationResourcesOrBuilder(
        int index) {
      return reservationResources_.get(index);
    }

    public static final int INTERPRETER_FIELD_NUMBER = 2;
    private int interpreter_ = 1;
    /**
     * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
     * @return Whether the interpreter field is set.
     */
    @java.lang.Override public boolean hasInterpreter() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
     * @return The interpreter.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter() {
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto result = org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(interpreter_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.R_ALL : result;
    }
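
    // Editor's note (not protoc output): interpreter_ stores the raw wire
    // number rather than the enum constant, so getInterpreter() re-resolves
    // it through forNumber() on each call. The R_ALL fallback is defensive:
    // during parsing, unrecognized numbers are routed to the unknown-field
    // set (see the Builder's mergeFrom below), so a mapped value is expected
    // here in practice.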

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      for (int i = 0; i < getReservationResourcesCount(); i++) {
        if (!getReservationResources(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < reservationResources_.size(); i++) {
        output.writeMessage(1, reservationResources_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(2, interpreter_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < reservationResources_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, reservationResources_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, interpreter_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
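
    // Editor's sketch (not protoc output): a minimal serialize/parse round
    // trip over the wire format computed above. toByteArray() is the standard
    // protobuf convenience wrapper around writeTo(), and its output length
    // equals getSerializedSize().
    private static ReservationRequestsProto exampleRoundTrip(ReservationRequestsProto message)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      byte[] wire = message.toByteArray();  // sized by getSerializedSize()
      return parseFrom(wire);
    }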

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto) obj;

      if (!getReservationResourcesList()
          .equals(other.getReservationResourcesList())) return false;
      if (hasInterpreter() != other.hasInterpreter()) return false;
      if (hasInterpreter()) {
        if (interpreter_ != other.interpreter_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getReservationResourcesCount() > 0) {
        hash = (37 * hash) + RESERVATION_RESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getReservationResourcesList().hashCode();
      }
      if (hasInterpreter()) {
        hash = (37 * hash) + INTERPRETER_FIELD_NUMBER;
        hash = (53 * hash) + interpreter_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
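
    // Editor's note (not protoc output): the parseDelimitedFrom variants
    // above expect a varint length prefix before the message bytes (the
    // framing produced by writeDelimitedTo), which lets several messages
    // share one stream; the plain parseFrom(InputStream) overloads instead
    // consume the stream to EOF as a single message.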

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationRequestsProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationRequestsProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (reservationResourcesBuilder_ == null) {
          reservationResources_ = java.util.Collections.emptyList();
        } else {
          reservationResources_ = null;
          reservationResourcesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        interpreter_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationRequestsProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result) {
        if (reservationResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            reservationResources_ = java.util.Collections.unmodifiableList(reservationResources_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.reservationResources_ = reservationResources_;
        } else {
          result.reservationResources_ = reservationResourcesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // The builder tracks interpreter presence in bit 0x00000002 because
        // bit 0x00000001 is reserved as the repeated field's ownership flag;
        // the built message needs only one presence bit, so it is remapped
        // to 0x00000001 here.
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.interpreter_ = interpreter_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance()) return this;
        if (reservationResourcesBuilder_ == null) {
          if (!other.reservationResources_.isEmpty()) {
            if (reservationResources_.isEmpty()) {
              reservationResources_ = other.reservationResources_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureReservationResourcesIsMutable();
              reservationResources_.addAll(other.reservationResources_);
            }
            onChanged();
          }
        } else {
          if (!other.reservationResources_.isEmpty()) {
            if (reservationResourcesBuilder_.isEmpty()) {
              reservationResourcesBuilder_.dispose();
              reservationResourcesBuilder_ = null;
              reservationResources_ = other.reservationResources_;
              bitField0_ = (bitField0_ & ~0x00000001);
              reservationResourcesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getReservationResourcesFieldBuilder() : null;
            } else {
              reservationResourcesBuilder_.addAllMessages(other.reservationResources_);
            }
          }
        }
        if (other.hasInterpreter()) {
          setInterpreter(other.getInterpreter());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        for (int i = 0; i < getReservationResourcesCount(); i++) {
          if (!getReservationResources(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.PARSER,
                        extensionRegistry);
                if (reservationResourcesBuilder_ == null) {
                  ensureReservationResourcesIsMutable();
                  reservationResources_.add(m);
                } else {
                  reservationResourcesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  interpreter_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> reservationResources_ =
        java.util.Collections.emptyList();
      private void ensureReservationResourcesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          reservationResources_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto>(reservationResources_);
          bitField0_ |= 0x00000001;
        }
      }
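
      // Editor's note (not protoc output): bit 0x00000001 of the builder's
      // bitField0_ doubles as an ownership flag for reservationResources_.
      // While the bit is clear the list may alias another message's immutable
      // list (see mergeFrom above), so the copy here is a copy-on-first-write;
      // buildPartialRepeatedFields later clears the bit again after wrapping
      // the list as unmodifiable and handing it to the built message.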

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> reservationResourcesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> getReservationResourcesList() {
        if (reservationResourcesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(reservationResources_);
        } else {
          return reservationResourcesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public int getReservationResourcesCount() {
        if (reservationResourcesBuilder_ == null) {
          return reservationResources_.size();
        } else {
          return reservationResourcesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto getReservationResources(int index) {
        if (reservationResourcesBuilder_ == null) {
          return reservationResources_.get(index);
        } else {
          return reservationResourcesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder setReservationResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) {
        if (reservationResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationResourcesIsMutable();
          reservationResources_.set(index, value);
          onChanged();
        } else {
          reservationResourcesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder setReservationResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) {
        if (reservationResourcesBuilder_ == null) {
          ensureReservationResourcesIsMutable();
          reservationResources_.set(index, builderForValue.build());
          onChanged();
        } else {
          reservationResourcesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder addReservationResources(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) {
        if (reservationResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationResourcesIsMutable();
          reservationResources_.add(value);
          onChanged();
        } else {
          reservationResourcesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder addReservationResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto value) {
        if (reservationResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureReservationResourcesIsMutable();
          reservationResources_.add(index, value);
          onChanged();
        } else {
          reservationResourcesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder addReservationResources(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) {
        if (reservationResourcesBuilder_ == null) {
          ensureReservationResourcesIsMutable();
          reservationResources_.add(builderForValue.build());
          onChanged();
        } else {
          reservationResourcesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder addReservationResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder builderForValue) {
        if (reservationResourcesBuilder_ == null) {
          ensureReservationResourcesIsMutable();
          reservationResources_.add(index, builderForValue.build());
          onChanged();
        } else {
          reservationResourcesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder addAllReservationResources(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto> values) {
        if (reservationResourcesBuilder_ == null) {
          ensureReservationResourcesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, reservationResources_);
          onChanged();
        } else {
          reservationResourcesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder clearReservationResources() {
        if (reservationResourcesBuilder_ == null) {
          reservationResources_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          reservationResourcesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public Builder removeReservationResources(int index) {
        if (reservationResourcesBuilder_ == null) {
          ensureReservationResourcesIsMutable();
          reservationResources_.remove(index);
          onChanged();
        } else {
          reservationResourcesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder getReservationResourcesBuilder(
          int index) {
        return getReservationResourcesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder getReservationResourcesOrBuilder(
          int index) {
        if (reservationResourcesBuilder_ == null) {
          return reservationResources_.get(index);
        } else {
          return reservationResourcesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> 
           getReservationResourcesOrBuilderList() {
        if (reservationResourcesBuilder_ != null) {
          return reservationResourcesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(reservationResources_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder addReservationResourcesBuilder() {
        return getReservationResourcesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder addReservationResourcesBuilder(
          int index) {
        return getReservationResourcesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ReservationRequestProto reservation_resources = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder> 
           getReservationResourcesBuilderList() {
        return getReservationResourcesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder> 
          getReservationResourcesFieldBuilder() {
        if (reservationResourcesBuilder_ == null) {
          reservationResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestProtoOrBuilder>(
                  reservationResources_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          reservationResources_ = null;
        }
        return reservationResourcesBuilder_;
      }

      private int interpreter_ = 1;
      /**
       * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
       * @return Whether the interpreter field is set.
       */
      @java.lang.Override public boolean hasInterpreter() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
       * @return The interpreter.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto getInterpreter() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto result = org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.forNumber(interpreter_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto.R_ALL : result;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
       * @param value The interpreter to set.
       * @return This builder for chaining.
       */
      public Builder setInterpreter(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestInterpreterProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        interpreter_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestInterpreterProto interpreter = 2 [default = R_ALL];</code>
       * @return This builder for chaining.
       */
      public Builder clearInterpreter() {
        bitField0_ = (bitField0_ & ~0x00000002);
        interpreter_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationRequestsProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationRequestsProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestsProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationRequestsProto>() {
      @java.lang.Override
      public ReservationRequestsProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestsProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationRequestsProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
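
  // Editor's sketch (not protoc output): assembling a ReservationRequestsProto
  // through the generated Builder shown above, using only APIs visible in this
  // file. The default ReservationRequestProto element is added purely for
  // illustration; build() validates initialization recursively, so a real
  // caller would populate each element's fields via its own builder first.
  private static ReservationRequestsProto exampleReservationRequests() {
    return ReservationRequestsProto.newBuilder()
        .addReservationResources(ReservationRequestProto.getDefaultInstance())
        .setInterpreter(ReservationRequestInterpreterProto.R_ALL)
        .build();
  }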

  public interface ReservationDefinitionProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationDefinitionProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     * @return Whether the reservationRequests field is set.
     */
    boolean hasReservationRequests();
    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     * @return The reservationRequests.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests();
    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder();

    /**
     * <code>optional int64 arrival = 2;</code>
     * @return Whether the arrival field is set.
     */
    boolean hasArrival();
    /**
     * <code>optional int64 arrival = 2;</code>
     * @return The arrival.
     */
    long getArrival();

    /**
     * <code>optional int64 deadline = 3;</code>
     * @return Whether the deadline field is set.
     */
    boolean hasDeadline();
    /**
     * <code>optional int64 deadline = 3;</code>
     * @return The deadline.
     */
    long getDeadline();

    /**
     * <code>optional string reservation_name = 4;</code>
     * @return Whether the reservationName field is set.
     */
    boolean hasReservationName();
    /**
     * <code>optional string reservation_name = 4;</code>
     * @return The reservationName.
     */
    java.lang.String getReservationName();
    /**
     * <code>optional string reservation_name = 4;</code>
     * @return The bytes for reservationName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getReservationNameBytes();

    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return Whether the recurrenceExpression field is set.
     */
    boolean hasRecurrenceExpression();
    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return The recurrenceExpression.
     */
    java.lang.String getRecurrenceExpression();
    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return The bytes for recurrenceExpression.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getRecurrenceExpressionBytes();

    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     * @return Whether the priority field is set.
     */
    boolean hasPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     * @return The priority.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority();
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder();
  }
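
  // Editor's sketch (not protoc output): wiring a ReservationRequestsProto
  // into a ReservationDefinitionProto. This excerpt ends before the definition
  // message's Builder setters, so the set* calls below are assumed from the
  // standard protoc pattern implied by the has*/get* pairs declared above;
  // the timestamps and name are illustrative values only.
  private static ReservationDefinitionProto exampleReservationDefinition(
      ReservationRequestsProto requests, long arrivalMs, long deadlineMs) {
    return ReservationDefinitionProto.newBuilder()
        .setReservationRequests(requests)
        .setArrival(arrivalMs)
        .setDeadline(deadlineMs)
        .setReservationName("example-reservation")  // hypothetical name
        .build();
  }
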
  /**
   * Protobuf type {@code hadoop.yarn.ReservationDefinitionProto}
   */
  public static final class ReservationDefinitionProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationDefinitionProto)
      ReservationDefinitionProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationDefinitionProto.newBuilder() to construct.
    private ReservationDefinitionProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationDefinitionProto() {
      reservationName_ = "";
      recurrenceExpression_ = "0";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationDefinitionProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESERVATION_REQUESTS_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto reservationRequests_;
    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     * @return Whether the reservationRequests field is set.
     */
    @java.lang.Override
    public boolean hasReservationRequests() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     * @return The reservationRequests.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests() {
      return reservationRequests_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder() {
      return reservationRequests_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_;
    }

    public static final int ARRIVAL_FIELD_NUMBER = 2;
    private long arrival_ = 0L;
    /**
     * <code>optional int64 arrival = 2;</code>
     * @return Whether the arrival field is set.
     */
    @java.lang.Override
    public boolean hasArrival() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 arrival = 2;</code>
     * @return The arrival.
     */
    @java.lang.Override
    public long getArrival() {
      return arrival_;
    }

    public static final int DEADLINE_FIELD_NUMBER = 3;
    private long deadline_ = 0L;
    /**
     * <code>optional int64 deadline = 3;</code>
     * @return Whether the deadline field is set.
     */
    @java.lang.Override
    public boolean hasDeadline() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 deadline = 3;</code>
     * @return The deadline.
     */
    @java.lang.Override
    public long getDeadline() {
      return deadline_;
    }

    public static final int RESERVATION_NAME_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object reservationName_ = "";
    /**
     * <code>optional string reservation_name = 4;</code>
     * @return Whether the reservationName field is set.
     */
    @java.lang.Override
    public boolean hasReservationName() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string reservation_name = 4;</code>
     * @return The reservationName.
     */
    @java.lang.Override
    public java.lang.String getReservationName() {
      java.lang.Object ref = reservationName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          reservationName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string reservation_name = 4;</code>
     * @return The bytes for reservationName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getReservationNameBytes() {
      java.lang.Object ref = reservationName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        reservationName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int RECURRENCE_EXPRESSION_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object recurrenceExpression_ = "0";
    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return Whether the recurrenceExpression field is set.
     */
    @java.lang.Override
    public boolean hasRecurrenceExpression() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return The recurrenceExpression.
     */
    @java.lang.Override
    public java.lang.String getRecurrenceExpression() {
      java.lang.Object ref = recurrenceExpression_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          recurrenceExpression_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string recurrence_expression = 5 [default = "0"];</code>
     * @return The bytes for recurrenceExpression.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getRecurrenceExpressionBytes() {
      java.lang.Object ref = recurrenceExpression_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        recurrenceExpression_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
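
    // Editor's note (not protoc output): reservationName_ and
    // recurrenceExpression_ each hold either a String or a ByteString. The
    // accessors above convert lazily in whichever direction is requested and
    // cache the result back into the volatile field (for bytes-to-String only
    // when the bytes are valid UTF-8), so repeated calls avoid re-decoding.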

    public static final int PRIORITY_FIELD_NUMBER = 6;
    private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     * @return Whether the priority field is set.
     */
    @java.lang.Override
    public boolean hasPriority() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     * @return The priority.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }
    /**
     * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
      return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasReservationRequests()) {
        if (!getReservationRequests().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getReservationRequests());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, arrival_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, deadline_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, reservationName_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, recurrenceExpression_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(6, getPriority());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getReservationRequests());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, arrival_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, deadline_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, reservationName_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, recurrenceExpression_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, getPriority());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto) obj;

      if (hasReservationRequests() != other.hasReservationRequests()) return false;
      if (hasReservationRequests()) {
        if (!getReservationRequests()
            .equals(other.getReservationRequests())) return false;
      }
      if (hasArrival() != other.hasArrival()) return false;
      if (hasArrival()) {
        if (getArrival()
            != other.getArrival()) return false;
      }
      if (hasDeadline() != other.hasDeadline()) return false;
      if (hasDeadline()) {
        if (getDeadline()
            != other.getDeadline()) return false;
      }
      if (hasReservationName() != other.hasReservationName()) return false;
      if (hasReservationName()) {
        if (!getReservationName()
            .equals(other.getReservationName())) return false;
      }
      if (hasRecurrenceExpression() != other.hasRecurrenceExpression()) return false;
      if (hasRecurrenceExpression()) {
        if (!getRecurrenceExpression()
            .equals(other.getRecurrenceExpression())) return false;
      }
      if (hasPriority() != other.hasPriority()) return false;
      if (hasPriority()) {
        if (!getPriority()
            .equals(other.getPriority())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReservationRequests()) {
        hash = (37 * hash) + RESERVATION_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getReservationRequests().hashCode();
      }
      if (hasArrival()) {
        hash = (37 * hash) + ARRIVAL_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getArrival());
      }
      if (hasDeadline()) {
        hash = (37 * hash) + DEADLINE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getDeadline());
      }
      if (hasReservationName()) {
        hash = (37 * hash) + RESERVATION_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getReservationName().hashCode();
      }
      if (hasRecurrenceExpression()) {
        hash = (37 * hash) + RECURRENCE_EXPRESSION_FIELD_NUMBER;
        hash = (53 * hash) + getRecurrenceExpression().hashCode();
      }
      if (hasPriority()) {
        hash = (37 * hash) + PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getPriority().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ReservationDefinitionProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationDefinitionProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationRequestsFieldBuilder();
          getPriorityFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reservationRequests_ = null;
        if (reservationRequestsBuilder_ != null) {
          reservationRequestsBuilder_.dispose();
          reservationRequestsBuilder_ = null;
        }
        arrival_ = 0L;
        deadline_ = 0L;
        reservationName_ = "";
        recurrenceExpression_ = "0";
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reservationRequests_ = reservationRequestsBuilder_ == null
              ? reservationRequests_
              : reservationRequestsBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.arrival_ = arrival_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.deadline_ = deadline_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.reservationName_ = reservationName_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.recurrenceExpression_ = recurrenceExpression_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.priority_ = priorityBuilder_ == null
              ? priority_
              : priorityBuilder_.build();
          to_bitField0_ |= 0x00000020;
        }
        result.bitField0_ |= to_bitField0_;
      }
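
      // Note: buildPartial0 copies a field into the result only when its
      // presence bit is set in bitField0_. The bit positions follow field
      // order: 0x01 reservation_requests, 0x02 arrival, 0x04 deadline,
      // 0x08 reservation_name, 0x10 recurrence_expression, 0x20 priority.
      // Copying the bits into result.bitField0_ is what makes the built
      // message's hasX() accessors mirror the builder's state.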

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) return this;
        if (other.hasReservationRequests()) {
          mergeReservationRequests(other.getReservationRequests());
        }
        if (other.hasArrival()) {
          setArrival(other.getArrival());
        }
        if (other.hasDeadline()) {
          setDeadline(other.getDeadline());
        }
        if (other.hasReservationName()) {
          reservationName_ = other.reservationName_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasRecurrenceExpression()) {
          recurrenceExpression_ = other.recurrenceExpression_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (other.hasPriority()) {
          mergePriority(other.getPriority());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
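
      // Note: the message-level merge above follows proto2 semantics: scalar
      // and string fields that are set in `other` overwrite this builder's
      // values, sub-messages (reservation_requests, priority) are merged
      // field-by-field via mergeReservationRequests/mergePriority, and
      // unknown fields are carried over rather than dropped.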

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasReservationRequests()) {
          if (!getReservationRequests().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getReservationRequestsFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                arrival_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 24: {
                deadline_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 34: {
                reservationName_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                recurrenceExpression_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 50: {
                input.readMessage(
                    getPriorityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000020;
                break;
              } // case 50
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
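
      // Note: each case label in the switch above is a raw wire tag,
      // (field_number << 3) | wire_type. For example, case 10 is field 1 with
      // wire type 2 (length-delimited message), case 16 is field 2 with wire
      // type 0 (varint), and case 34 is field 4 with wire type 2 (string).
      // Tag 0 means end of input; anything unrecognized is handed to
      // parseUnknownField so it survives a read-modify-write cycle.
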
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto reservationRequests_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder> reservationRequestsBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       * @return Whether the reservationRequests field is set.
       */
      public boolean hasReservationRequests() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       * @return The reservationRequests.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto getReservationRequests() {
        if (reservationRequestsBuilder_ == null) {
          return reservationRequests_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_;
        } else {
          return reservationRequestsBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public Builder setReservationRequests(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto value) {
        if (reservationRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationRequests_ = value;
        } else {
          reservationRequestsBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public Builder setReservationRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder builderForValue) {
        if (reservationRequestsBuilder_ == null) {
          reservationRequests_ = builderForValue.build();
        } else {
          reservationRequestsBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public Builder mergeReservationRequests(org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto value) {
        if (reservationRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            reservationRequests_ != null &&
            reservationRequests_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance()) {
            getReservationRequestsBuilder().mergeFrom(value);
          } else {
            reservationRequests_ = value;
          }
        } else {
          reservationRequestsBuilder_.mergeFrom(value);
        }
        if (reservationRequests_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public Builder clearReservationRequests() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reservationRequests_ = null;
        if (reservationRequestsBuilder_ != null) {
          reservationRequestsBuilder_.dispose();
          reservationRequestsBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder getReservationRequestsBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReservationRequestsFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder getReservationRequestsOrBuilder() {
        if (reservationRequestsBuilder_ != null) {
          return reservationRequestsBuilder_.getMessageOrBuilder();
        } else {
          return reservationRequests_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.getDefaultInstance() : reservationRequests_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationRequestsProto reservation_requests = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder> 
          getReservationRequestsFieldBuilder() {
        if (reservationRequestsBuilder_ == null) {
          reservationRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationRequestsProtoOrBuilder>(
                  getReservationRequests(),
                  getParentForChildren(),
                  isClean());
          reservationRequests_ = null;
        }
        return reservationRequestsBuilder_;
      }
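
      // Note: the SingleFieldBuilderV3 above is created lazily on first use.
      // Once it exists it takes ownership of the sub-message (the plain
      // reservationRequests_ reference is nulled) and reports child edits back
      // to this builder through getParentForChildren()/isClean(); the priority
      // field below repeats the same pattern.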

      private long arrival_ ;
      /**
       * <code>optional int64 arrival = 2;</code>
       * @return Whether the arrival field is set.
       */
      @java.lang.Override
      public boolean hasArrival() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 arrival = 2;</code>
       * @return The arrival.
       */
      @java.lang.Override
      public long getArrival() {
        return arrival_;
      }
      /**
       * <code>optional int64 arrival = 2;</code>
       * @param value The arrival to set.
       * @return This builder for chaining.
       */
      public Builder setArrival(long value) {
        arrival_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 arrival = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearArrival() {
        bitField0_ = (bitField0_ & ~0x00000002);
        arrival_ = 0L;
        onChanged();
        return this;
      }

      private long deadline_ ;
      /**
       * <code>optional int64 deadline = 3;</code>
       * @return Whether the deadline field is set.
       */
      @java.lang.Override
      public boolean hasDeadline() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 deadline = 3;</code>
       * @return The deadline.
       */
      @java.lang.Override
      public long getDeadline() {
        return deadline_;
      }
      /**
       * <code>optional int64 deadline = 3;</code>
       * @param value The deadline to set.
       * @return This builder for chaining.
       */
      public Builder setDeadline(long value) {
        deadline_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 deadline = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearDeadline() {
        bitField0_ = (bitField0_ & ~0x00000004);
        deadline_ = 0L;
        onChanged();
        return this;
      }

      private java.lang.Object reservationName_ = "";
      /**
       * <code>optional string reservation_name = 4;</code>
       * @return Whether the reservationName field is set.
       */
      public boolean hasReservationName() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional string reservation_name = 4;</code>
       * @return The reservationName.
       */
      public java.lang.String getReservationName() {
        java.lang.Object ref = reservationName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            reservationName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string reservation_name = 4;</code>
       * @return The bytes for reservationName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getReservationNameBytes() {
        java.lang.Object ref = reservationName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          reservationName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string reservation_name = 4;</code>
       * @param value The reservationName to set.
       * @return This builder for chaining.
       */
      public Builder setReservationName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        reservationName_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_name = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearReservationName() {
        reservationName_ = getDefaultInstance().getReservationName();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>optional string reservation_name = 4;</code>
       * @param value The bytes for reservationName to set.
       * @return This builder for chaining.
       */
      public Builder setReservationNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        reservationName_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
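
      // Note: reservationName_ (and recurrenceExpression_ below) is typed as
      // java.lang.Object because it can hold either a decoded String or the
      // raw ByteString read off the wire. getReservationName() converts
      // lazily, caching the String only when the bytes are valid UTF-8, and
      // getReservationNameBytes() caches the reverse conversion.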

      private java.lang.Object recurrenceExpression_ = "0";
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @return Whether the recurrenceExpression field is set.
       */
      public boolean hasRecurrenceExpression() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @return The recurrenceExpression.
       */
      public java.lang.String getRecurrenceExpression() {
        java.lang.Object ref = recurrenceExpression_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            recurrenceExpression_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @return The bytes for recurrenceExpression.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getRecurrenceExpressionBytes() {
        java.lang.Object ref = recurrenceExpression_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          recurrenceExpression_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @param value The recurrenceExpression to set.
       * @return This builder for chaining.
       */
      public Builder setRecurrenceExpression(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        recurrenceExpression_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @return This builder for chaining.
       */
      public Builder clearRecurrenceExpression() {
        recurrenceExpression_ = getDefaultInstance().getRecurrenceExpression();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string recurrence_expression = 5 [default = "0"];</code>
       * @param value The bytes for recurrenceExpression to set.
       * @return This builder for chaining.
       */
      public Builder setRecurrenceExpressionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        recurrenceExpression_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto priority_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> priorityBuilder_;
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       * @return Whether the priority field is set.
       */
      public boolean hasPriority() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       * @return The priority.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto getPriority() {
        if (priorityBuilder_ == null) {
          return priority_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        } else {
          return priorityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public Builder setPriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          priority_ = value;
        } else {
          priorityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public Builder setPriority(
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder builderForValue) {
        if (priorityBuilder_ == null) {
          priority_ = builderForValue.build();
        } else {
          priorityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public Builder mergePriority(org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto value) {
        if (priorityBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0) &&
            priority_ != null &&
            priority_ != org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance()) {
            getPriorityBuilder().mergeFrom(value);
          } else {
            priority_ = value;
          }
        } else {
          priorityBuilder_.mergeFrom(value);
        }
        if (priority_ != null) {
          bitField0_ |= 0x00000020;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public Builder clearPriority() {
        bitField0_ = (bitField0_ & ~0x00000020);
        priority_ = null;
        if (priorityBuilder_ != null) {
          priorityBuilder_.dispose();
          priorityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder getPriorityBuilder() {
        bitField0_ |= 0x00000020;
        onChanged();
        return getPriorityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder getPriorityOrBuilder() {
        if (priorityBuilder_ != null) {
          return priorityBuilder_.getMessageOrBuilder();
        } else {
          return priority_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.getDefaultInstance() : priority_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.PriorityProto priority = 6;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder> 
          getPriorityFieldBuilder() {
        if (priorityBuilder_ == null) {
          priorityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.PriorityProtoOrBuilder>(
                  getPriority(),
                  getParentForChildren(),
                  isClean());
          priority_ = null;
        }
        return priorityBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationDefinitionProto)
    }
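
    // Note: an illustrative builder round trip (the field values and the
    // "nightly-report" name are hypothetical, for the sketch only):
    //
    //   ReservationDefinitionProto def = ReservationDefinitionProto.newBuilder()
    //       .setArrival(1000L)
    //       .setDeadline(5000L)
    //       .setReservationName("nightly-report")
    //       .build();
    //   byte[] bytes = def.toByteArray();
    //   ReservationDefinitionProto parsed =
    //       ReservationDefinitionProto.parseFrom(bytes);
    //   assert parsed.getDeadline() == 5000L;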

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationDefinitionProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationDefinitionProto>() {
      @java.lang.Override
      public ReservationDefinitionProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationDefinitionProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ResourceAllocationRequestProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ResourceAllocationRequestProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int64 start_time = 1;</code>
     * @return Whether the startTime field is set.
     */
    boolean hasStartTime();
    /**
     * <code>optional int64 start_time = 1;</code>
     * @return The startTime.
     */
    long getStartTime();

    /**
     * <code>optional int64 end_time = 2;</code>
     * @return Whether the endTime field is set.
     */
    boolean hasEndTime();
    /**
     * <code>optional int64 end_time = 2;</code>
     * @return The endTime.
     */
    long getEndTime();

    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ResourceAllocationRequestProto}
   */
  public static final class ResourceAllocationRequestProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ResourceAllocationRequestProto)
      ResourceAllocationRequestProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ResourceAllocationRequestProto.newBuilder() to construct.
    private ResourceAllocationRequestProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ResourceAllocationRequestProto() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ResourceAllocationRequestProto();
    }
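
    // Note: newInstance(UnusedPrivateParameter) gives the shaded protobuf
    // runtime a way to create instances without constructor reflection; the
    // unused parameter only keeps the signature distinct from the private
    // no-arg constructor.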

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder.class);
    }

    private int bitField0_;
    public static final int START_TIME_FIELD_NUMBER = 1;
    private long startTime_ = 0L;
    /**
     * <code>optional int64 start_time = 1;</code>
     * @return Whether the startTime field is set.
     */
    @java.lang.Override
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int64 start_time = 1;</code>
     * @return The startTime.
     */
    @java.lang.Override
    public long getStartTime() {
      return startTime_;
    }

    public static final int END_TIME_FIELD_NUMBER = 2;
    private long endTime_ = 0L;
    /**
     * <code>optional int64 end_time = 2;</code>
     * @return Whether the endTime field is set.
     */
    @java.lang.Override
    public boolean hasEndTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 end_time = 2;</code>
     * @return The endTime.
     */
    @java.lang.Override
    public long getEndTime() {
      return endTime_;
    }

    public static final int RESOURCE_FIELD_NUMBER = 3;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasResource()) {
        if (!getResource().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
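
    // Note: memoizedIsInitialized caches the tri-state result above
    // (-1 unknown, 0 false, 1 true), which is safe because the message is
    // immutable. The nested check on resource exists because protoc only
    // emits it for message types that can be uninitialized, i.e. that
    // transitively contain required fields.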

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt64(1, startTime_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, endTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeMessage(3, getResource());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(1, startTime_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, endTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, getResource());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
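
    // Note: getSerializedSize computes the presence-gated field sizes once
    // and memoizes the total (memoizedSize is -1 until first use), so writeTo
    // and repeated serializations do not re-measure an immutable message.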

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto) obj;

      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasEndTime() != other.hasEndTime()) return false;
      if (hasEndTime()) {
        if (getEndTime()
            != other.getEndTime()) return false;
      }
      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasEndTime()) {
        hash = (37 * hash) + END_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getEndTime());
      }
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
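
    // Note: hashCode seeds with the descriptor hash and mixes each present
    // field with the 37/53 prime pattern, memoizing the result; equals()
    // compares presence before values, so an unset field is never equal to a
    // field explicitly set to its default.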

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ResourceAllocationRequestProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ResourceAllocationRequestProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        startTime_ = 0L;
        endTime_ = 0L;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.endTime_ = endTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance()) return this;
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasEndTime()) {
          setEndTime(other.getEndTime());
        }
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasResource()) {
          if (!getResource().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                startTime_ = input.readInt64();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 16: {
                endTime_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              case 26: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private long startTime_ ;
      /**
       * <code>optional int64 start_time = 1;</code>
       * @return Whether the startTime field is set.
       */
      @java.lang.Override
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int64 start_time = 1;</code>
       * @return The startTime.
       */
      @java.lang.Override
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 start_time = 1;</code>
       * @param value The startTime to set.
       * @return This builder for chaining.
       */
      public Builder setStartTime(long value) {
        startTime_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_time = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000001);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long endTime_ ;
      /**
       * <code>optional int64 end_time = 2;</code>
       * @return Whether the endTime field is set.
       */
      @java.lang.Override
      public boolean hasEndTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 end_time = 2;</code>
       * @return The endTime.
       */
      @java.lang.Override
      public long getEndTime() {
        return endTime_;
      }
      /**
       * <code>optional int64 end_time = 2;</code>
       * @param value The endTime to set.
       * @return This builder for chaining.
       */
      public Builder setEndTime(long value) {
        endTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 end_time = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearEndTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        endTime_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000004;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000004);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : resource_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto resource = 3;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ResourceAllocationRequestProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ResourceAllocationRequestProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated
    public static final org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ResourceAllocationRequestProto>() {
      @java.lang.Override
      public ResourceAllocationRequestProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ResourceAllocationRequestProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
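
  // Illustrative usage sketch, not produced by protoc: building, serializing,
  // and re-parsing a ResourceAllocationRequestProto through the generated API
  // above. Field values are arbitrary placeholders; only scalar fields are set
  // so isInitialized() trivially passes.
  //
  //   YarnProtos.ResourceAllocationRequestProto alloc =
  //       YarnProtos.ResourceAllocationRequestProto.newBuilder()
  //           .setStartTime(1L)   // placeholder epoch millis
  //           .setEndTime(2L)
  //           .build();
  //   byte[] wire = alloc.toByteArray();
  //   YarnProtos.ResourceAllocationRequestProto roundTripped =
  //       YarnProtos.ResourceAllocationRequestProto.parseFrom(wire);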

  public interface ReservationAllocationStateProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ReservationAllocationStateProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return Whether the reservationDefinition field is set.
     */
    boolean hasReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return The reservationDefinition.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition();
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder();

    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> 
        getAllocationRequestsList();
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index);
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    int getAllocationRequestsCount();
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> 
        getAllocationRequestsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder(
        int index);

    /**
     * <code>optional int64 start_time = 3;</code>
     * @return Whether the startTime field is set.
     */
    boolean hasStartTime();
    /**
     * <code>optional int64 start_time = 3;</code>
     * @return The startTime.
     */
    long getStartTime();

    /**
     * <code>optional int64 end_time = 4;</code>
     * @return Whether the endTime field is set.
     */
    boolean hasEndTime();
    /**
     * <code>optional int64 end_time = 4;</code>
     * @return The endTime.
     */
    long getEndTime();

    /**
     * <code>optional string user = 5;</code>
     * @return Whether the user field is set.
     */
    boolean hasUser();
    /**
     * <code>optional string user = 5;</code>
     * @return The user.
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 5;</code>
     * @return The bytes for user.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * <code>optional bool contains_gangs = 6;</code>
     * @return Whether the containsGangs field is set.
     */
    boolean hasContainsGangs();
    /**
     * <code>optional bool contains_gangs = 6;</code>
     * @return The containsGangs.
     */
    boolean getContainsGangs();

    /**
     * <code>optional int64 acceptance_time = 7;</code>
     * @return Whether the acceptanceTime field is set.
     */
    boolean hasAcceptanceTime();
    /**
     * <code>optional int64 acceptance_time = 7;</code>
     * @return The acceptanceTime.
     */
    long getAcceptanceTime();

    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     * @return Whether the reservationId field is set.
     */
    boolean hasReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     * @return The reservationId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId();
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ReservationAllocationStateProto}
   */
  public static final class ReservationAllocationStateProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ReservationAllocationStateProto)
      ReservationAllocationStateProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ReservationAllocationStateProto.newBuilder() to construct.
    private ReservationAllocationStateProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ReservationAllocationStateProto() {
      allocationRequests_ = java.util.Collections.emptyList();
      user_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ReservationAllocationStateProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder.class);
    }

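    // Presence bits for the optional fields of this message, as tested by the
    // hasXxx() accessors below: 0x01 reservation_definition, 0x02 start_time,
    // 0x04 end_time, 0x08 user, 0x10 contains_gangs, 0x20 acceptance_time,
    // 0x40 reservation_id.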
    private int bitField0_;
    public static final int RESERVATION_DEFINITION_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return Whether the reservationDefinition field is set.
     */
    @java.lang.Override
    public boolean hasReservationDefinition() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     * @return The reservationDefinition.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
      return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
    }

    public static final int ALLOCATION_REQUESTS_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> allocationRequests_;
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> getAllocationRequestsList() {
      return allocationRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> 
        getAllocationRequestsOrBuilderList() {
      return allocationRequests_;
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    @java.lang.Override
    public int getAllocationRequestsCount() {
      return allocationRequests_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index) {
      return allocationRequests_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder(
        int index) {
      return allocationRequests_.get(index);
    }

    public static final int START_TIME_FIELD_NUMBER = 3;
    private long startTime_ = 0L;
    /**
     * <code>optional int64 start_time = 3;</code>
     * @return Whether the startTime field is set.
     */
    @java.lang.Override
    public boolean hasStartTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 start_time = 3;</code>
     * @return The startTime.
     */
    @java.lang.Override
    public long getStartTime() {
      return startTime_;
    }

    public static final int END_TIME_FIELD_NUMBER = 4;
    private long endTime_ = 0L;
    /**
     * <code>optional int64 end_time = 4;</code>
     * @return Whether the endTime field is set.
     */
    @java.lang.Override
    public boolean hasEndTime() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 end_time = 4;</code>
     * @return The endTime.
     */
    @java.lang.Override
    public long getEndTime() {
      return endTime_;
    }

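    // user_ holds either the raw ByteString read off the wire or the decoded
    // java.lang.String; getUser() caches the String form once the bytes prove
    // to be valid UTF-8, and getUserBytes() caches the ByteString form.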
    public static final int USER_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 5;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional string user = 5;</code>
     * @return The user.
     */
    @java.lang.Override
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 5;</code>
     * @return The bytes for user.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CONTAINS_GANGS_FIELD_NUMBER = 6;
    private boolean containsGangs_ = false;
    /**
     * <code>optional bool contains_gangs = 6;</code>
     * @return Whether the containsGangs field is set.
     */
    @java.lang.Override
    public boolean hasContainsGangs() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional bool contains_gangs = 6;</code>
     * @return The containsGangs.
     */
    @java.lang.Override
    public boolean getContainsGangs() {
      return containsGangs_;
    }

    public static final int ACCEPTANCE_TIME_FIELD_NUMBER = 7;
    private long acceptanceTime_ = 0L;
    /**
     * <code>optional int64 acceptance_time = 7;</code>
     * @return Whether the acceptanceTime field is set.
     */
    @java.lang.Override
    public boolean hasAcceptanceTime() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional int64 acceptance_time = 7;</code>
     * @return The acceptanceTime.
     */
    @java.lang.Override
    public long getAcceptanceTime() {
      return acceptanceTime_;
    }

    public static final int RESERVATION_ID_FIELD_NUMBER = 8;
    private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     * @return Whether the reservationId field is set.
     */
    @java.lang.Override
    public boolean hasReservationId() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     * @return The reservationId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }
    /**
     * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
      return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
    }

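    // Memoized result of isInitialized(): -1 = not yet computed,
    // 0 = known uninitialized, 1 = known initialized.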
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasReservationDefinition()) {
        if (!getReservationDefinition().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getAllocationRequestsCount(); i++) {
        if (!getAllocationRequests(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getReservationDefinition());
      }
      for (int i = 0; i < allocationRequests_.size(); i++) {
        output.writeMessage(2, allocationRequests_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(3, startTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(4, endTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeBool(6, containsGangs_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeInt64(7, acceptanceTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeMessage(8, getReservationId());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getReservationDefinition());
      }
      for (int i = 0; i < allocationRequests_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, allocationRequests_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, startTime_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(4, endTime_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(5, user_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(6, containsGangs_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(7, acceptanceTime_);
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, getReservationId());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto) obj;

      if (hasReservationDefinition() != other.hasReservationDefinition()) return false;
      if (hasReservationDefinition()) {
        if (!getReservationDefinition()
            .equals(other.getReservationDefinition())) return false;
      }
      if (!getAllocationRequestsList()
          .equals(other.getAllocationRequestsList())) return false;
      if (hasStartTime() != other.hasStartTime()) return false;
      if (hasStartTime()) {
        if (getStartTime()
            != other.getStartTime()) return false;
      }
      if (hasEndTime() != other.hasEndTime()) return false;
      if (hasEndTime()) {
        if (getEndTime()
            != other.getEndTime()) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasContainsGangs() != other.hasContainsGangs()) return false;
      if (hasContainsGangs()) {
        if (getContainsGangs()
            != other.getContainsGangs()) return false;
      }
      if (hasAcceptanceTime() != other.hasAcceptanceTime()) return false;
      if (hasAcceptanceTime()) {
        if (getAcceptanceTime()
            != other.getAcceptanceTime()) return false;
      }
      if (hasReservationId() != other.hasReservationId()) return false;
      if (hasReservationId()) {
        if (!getReservationId()
            .equals(other.getReservationId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasReservationDefinition()) {
        hash = (37 * hash) + RESERVATION_DEFINITION_FIELD_NUMBER;
        hash = (53 * hash) + getReservationDefinition().hashCode();
      }
      if (getAllocationRequestsCount() > 0) {
        hash = (37 * hash) + ALLOCATION_REQUESTS_FIELD_NUMBER;
        hash = (53 * hash) + getAllocationRequestsList().hashCode();
      }
      if (hasStartTime()) {
        hash = (37 * hash) + START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getStartTime());
      }
      if (hasEndTime()) {
        hash = (37 * hash) + END_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getEndTime());
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasContainsGangs()) {
        hash = (37 * hash) + CONTAINS_GANGS_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getContainsGangs());
      }
      if (hasAcceptanceTime()) {
        hash = (37 * hash) + ACCEPTANCE_TIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAcceptanceTime());
      }
      if (hasReservationId()) {
        hash = (37 * hash) + RESERVATION_ID_FIELD_NUMBER;
        hash = (53 * hash) + getReservationId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
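
    // Illustrative usage sketch, not produced by protoc: assembling a
    // ReservationAllocationStateProto through the Builder defined below, using
    // only setters visible in this class. All values are placeholders.
    //
    //   YarnProtos.ReservationAllocationStateProto state =
    //       YarnProtos.ReservationAllocationStateProto.newBuilder()
    //           .addAllocationRequests(
    //               YarnProtos.ResourceAllocationRequestProto.newBuilder()
    //                   .setStartTime(1L)
    //                   .setEndTime(2L))
    //           .setStartTime(1L)
    //           .setEndTime(2L)
    //           .setContainsGangs(false)
    //           .setAcceptanceTime(3L)
    //           .build();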
    /**
     * Protobuf type {@code hadoop.yarn.ReservationAllocationStateProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ReservationAllocationStateProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getReservationDefinitionFieldBuilder();
          getAllocationRequestsFieldBuilder();
          getReservationIdFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        if (allocationRequestsBuilder_ == null) {
          allocationRequests_ = java.util.Collections.emptyList();
        } else {
          allocationRequests_ = null;
          allocationRequestsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        startTime_ = 0L;
        endTime_ = 0L;
        user_ = "";
        containsGangs_ = false;
        acceptanceTime_ = 0L;
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result) {
        if (allocationRequestsBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            allocationRequests_ = java.util.Collections.unmodifiableList(allocationRequests_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.allocationRequests_ = allocationRequests_;
        } else {
          result.allocationRequests_ = allocationRequestsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.reservationDefinition_ = reservationDefinitionBuilder_ == null
              ? reservationDefinition_
              : reservationDefinitionBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.startTime_ = startTime_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.endTime_ = endTime_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.user_ = user_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.containsGangs_ = containsGangs_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.acceptanceTime_ = acceptanceTime_;
          to_bitField0_ |= 0x00000020;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.reservationId_ = reservationIdBuilder_ == null
              ? reservationId_
              : reservationIdBuilder_.build();
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto.getDefaultInstance()) return this;
        if (other.hasReservationDefinition()) {
          mergeReservationDefinition(other.getReservationDefinition());
        }
        if (allocationRequestsBuilder_ == null) {
          if (!other.allocationRequests_.isEmpty()) {
            if (allocationRequests_.isEmpty()) {
              allocationRequests_ = other.allocationRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureAllocationRequestsIsMutable();
              allocationRequests_.addAll(other.allocationRequests_);
            }
            onChanged();
          }
        } else {
          if (!other.allocationRequests_.isEmpty()) {
            if (allocationRequestsBuilder_.isEmpty()) {
              allocationRequestsBuilder_.dispose();
              allocationRequestsBuilder_ = null;
              allocationRequests_ = other.allocationRequests_;
              bitField0_ = (bitField0_ & ~0x00000002);
              allocationRequestsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAllocationRequestsFieldBuilder() : null;
            } else {
              allocationRequestsBuilder_.addAllMessages(other.allocationRequests_);
            }
          }
        }
        if (other.hasStartTime()) {
          setStartTime(other.getStartTime());
        }
        if (other.hasEndTime()) {
          setEndTime(other.getEndTime());
        }
        if (other.hasUser()) {
          user_ = other.user_;
          bitField0_ |= 0x00000010;
          onChanged();
        }
        if (other.hasContainsGangs()) {
          setContainsGangs(other.getContainsGangs());
        }
        if (other.hasAcceptanceTime()) {
          setAcceptanceTime(other.getAcceptanceTime());
        }
        if (other.hasReservationId()) {
          mergeReservationId(other.getReservationId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasReservationDefinition()) {
          if (!getReservationDefinition().isInitialized()) {
            return false;
          }
        }
        for (int i = 0; i < getAllocationRequestsCount(); i++) {
          if (!getAllocationRequests(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
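            // Each case value is a protobuf wire tag, (field_number << 3) | wire_type:
            // 10 = field 1 (length-delimited), 24 = field 3 (varint),
            // 42 = field 5 (length-delimited), 66 = field 8 (length-delimited), etc.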
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getReservationDefinitionFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.PARSER,
                        extensionRegistry);
                if (allocationRequestsBuilder_ == null) {
                  ensureAllocationRequestsIsMutable();
                  allocationRequests_.add(m);
                } else {
                  allocationRequestsBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 24: {
                startTime_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                endTime_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 42: {
                user_ = input.readBytes();
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                containsGangs_ = input.readBool();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 56: {
                acceptanceTime_ = input.readInt64();
                bitField0_ |= 0x00000040;
                break;
              } // case 56
              case 66: {
                input.readMessage(
                    getReservationIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto reservationDefinition_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> reservationDefinitionBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       * @return Whether the reservationDefinition field is set.
       */
      public boolean hasReservationDefinition() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       * @return The reservationDefinition.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto getReservationDefinition() {
        if (reservationDefinitionBuilder_ == null) {
          return reservationDefinition_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        } else {
          return reservationDefinitionBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder setReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationDefinition_ = value;
        } else {
          reservationDefinitionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder setReservationDefinition(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder builderForValue) {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinition_ = builderForValue.build();
        } else {
          reservationDefinitionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder mergeReservationDefinition(org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto value) {
        if (reservationDefinitionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            reservationDefinition_ != null &&
            reservationDefinition_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance()) {
            getReservationDefinitionBuilder().mergeFrom(value);
          } else {
            reservationDefinition_ = value;
          }
        } else {
          reservationDefinitionBuilder_.mergeFrom(value);
        }
        if (reservationDefinition_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public Builder clearReservationDefinition() {
        bitField0_ = (bitField0_ & ~0x00000001);
        reservationDefinition_ = null;
        if (reservationDefinitionBuilder_ != null) {
          reservationDefinitionBuilder_.dispose();
          reservationDefinitionBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder getReservationDefinitionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getReservationDefinitionFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder getReservationDefinitionOrBuilder() {
        if (reservationDefinitionBuilder_ != null) {
          return reservationDefinitionBuilder_.getMessageOrBuilder();
        } else {
          return reservationDefinition_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.getDefaultInstance() : reservationDefinition_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationDefinitionProto reservation_definition = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder> 
          getReservationDefinitionFieldBuilder() {
        if (reservationDefinitionBuilder_ == null) {
          reservationDefinitionBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationDefinitionProtoOrBuilder>(
                  getReservationDefinition(),
                  getParentForChildren(),
                  isClean());
          reservationDefinition_ = null;
        }
        return reservationDefinitionBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> allocationRequests_ =
        java.util.Collections.emptyList();
      private void ensureAllocationRequestsIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          allocationRequests_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto>(allocationRequests_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> allocationRequestsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> getAllocationRequestsList() {
        if (allocationRequestsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(allocationRequests_);
        } else {
          return allocationRequestsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public int getAllocationRequestsCount() {
        if (allocationRequestsBuilder_ == null) {
          return allocationRequests_.size();
        } else {
          return allocationRequestsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto getAllocationRequests(int index) {
        if (allocationRequestsBuilder_ == null) {
          return allocationRequests_.get(index);
        } else {
          return allocationRequestsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder setAllocationRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) {
        if (allocationRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocationRequestsIsMutable();
          allocationRequests_.set(index, value);
          onChanged();
        } else {
          allocationRequestsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder setAllocationRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) {
        if (allocationRequestsBuilder_ == null) {
          ensureAllocationRequestsIsMutable();
          allocationRequests_.set(index, builderForValue.build());
          onChanged();
        } else {
          allocationRequestsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder addAllocationRequests(org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) {
        if (allocationRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocationRequestsIsMutable();
          allocationRequests_.add(value);
          onChanged();
        } else {
          allocationRequestsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder addAllocationRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto value) {
        if (allocationRequestsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAllocationRequestsIsMutable();
          allocationRequests_.add(index, value);
          onChanged();
        } else {
          allocationRequestsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder addAllocationRequests(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) {
        if (allocationRequestsBuilder_ == null) {
          ensureAllocationRequestsIsMutable();
          allocationRequests_.add(builderForValue.build());
          onChanged();
        } else {
          allocationRequestsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder addAllocationRequests(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder builderForValue) {
        if (allocationRequestsBuilder_ == null) {
          ensureAllocationRequestsIsMutable();
          allocationRequests_.add(index, builderForValue.build());
          onChanged();
        } else {
          allocationRequestsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder addAllAllocationRequests(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto> values) {
        if (allocationRequestsBuilder_ == null) {
          ensureAllocationRequestsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, allocationRequests_);
          onChanged();
        } else {
          allocationRequestsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder clearAllocationRequests() {
        if (allocationRequestsBuilder_ == null) {
          allocationRequests_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          allocationRequestsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public Builder removeAllocationRequests(int index) {
        if (allocationRequestsBuilder_ == null) {
          ensureAllocationRequestsIsMutable();
          allocationRequests_.remove(index);
          onChanged();
        } else {
          allocationRequestsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder getAllocationRequestsBuilder(
          int index) {
        return getAllocationRequestsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder getAllocationRequestsOrBuilder(
          int index) {
        if (allocationRequestsBuilder_ == null) {
          return allocationRequests_.get(index);
        } else {
          return allocationRequestsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> 
           getAllocationRequestsOrBuilderList() {
        if (allocationRequestsBuilder_ != null) {
          return allocationRequestsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(allocationRequests_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder addAllocationRequestsBuilder() {
        return getAllocationRequestsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder addAllocationRequestsBuilder(
          int index) {
        return getAllocationRequestsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ResourceAllocationRequestProto allocation_requests = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder> 
           getAllocationRequestsBuilderList() {
        return getAllocationRequestsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder> 
          getAllocationRequestsFieldBuilder() {
        if (allocationRequestsBuilder_ == null) {
          allocationRequestsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceAllocationRequestProtoOrBuilder>(
                  allocationRequests_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          allocationRequests_ = null;
        }
        return allocationRequestsBuilder_;
      }
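
      // The repeated "allocation_requests" accessors above all follow one
      // pattern: the field is backed by a plain java.util.List until any
      // *Builder accessor forces creation of a RepeatedFieldBuilderV3, which
      // then owns the data (allocationRequests_ is nulled out). Illustrative
      // usage sketch -- not emitted by protoc, and assuming the message's
      // remaining fields are optional so build() succeeds:
      //
      //   ReservationAllocationStateProto state =
      //       ReservationAllocationStateProto.newBuilder()
      //           .addAllocationRequests(
      //               ResourceAllocationRequestProto.getDefaultInstance())
      //           .build();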

      private long startTime_ ;
      /**
       * <code>optional int64 start_time = 3;</code>
       * @return Whether the startTime field is set.
       */
      @java.lang.Override
      public boolean hasStartTime() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 start_time = 3;</code>
       * @return The startTime.
       */
      @java.lang.Override
      public long getStartTime() {
        return startTime_;
      }
      /**
       * <code>optional int64 start_time = 3;</code>
       * @param value The startTime to set.
       * @return This builder for chaining.
       */
      public Builder setStartTime(long value) {
        startTime_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 start_time = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearStartTime() {
        bitField0_ = (bitField0_ & ~0x00000004);
        startTime_ = 0L;
        onChanged();
        return this;
      }

      private long endTime_ ;
      /**
       * <code>optional int64 end_time = 4;</code>
       * @return Whether the endTime field is set.
       */
      @java.lang.Override
      public boolean hasEndTime() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 end_time = 4;</code>
       * @return The endTime.
       */
      @java.lang.Override
      public long getEndTime() {
        return endTime_;
      }
      /**
       * <code>optional int64 end_time = 4;</code>
       * @param value The endTime to set.
       * @return This builder for chaining.
       */
      public Builder setEndTime(long value) {
        endTime_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 end_time = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearEndTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        endTime_ = 0L;
        onChanged();
        return this;
      }
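
      // start_time / end_time use the proto2 presence-bit pattern: bitField0_
      // records which optional fields are set (0x04 = start_time, 0x08 =
      // end_time). The values are plain int64s; in YARN they conventionally
      // hold milliseconds since the epoch, though nothing here enforces that.
      // Hedged sketch (illustrative only):
      //
      //   Builder b = ReservationAllocationStateProto.newBuilder()
      //       .setStartTime(System.currentTimeMillis());
      //   boolean hasEnd = b.hasEndTime();  // false until setEndTime is called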

      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 5;</code>
       * @return Whether the user field is set.
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional string user = 5;</code>
       * @return The user.
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 5;</code>
       * @return The bytes for user.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 5;</code>
       * @param value The user to set.
       * @return This builder for chaining.
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearUser() {
        user_ = getDefaultInstance().getUser();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 5;</code>
       * @param value The bytes for user to set.
       * @return This builder for chaining.
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
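
      // The "user" field keeps either a java.lang.String or a ByteString in
      // the same Object reference: getUser() lazily decodes UTF-8 bytes and
      // caches the String only if the bytes were valid UTF-8, while
      // getUserBytes() lazily encodes and caches the ByteString. Sketch
      // (illustrative, not part of the generated API):
      //
      //   Builder b = ReservationAllocationStateProto.newBuilder().setUser("alice");
      //   org.apache.hadoop.thirdparty.protobuf.ByteString raw = b.getUserBytes();
      //   // first call encodes to UTF-8 and caches; later calls reuse it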

      private boolean containsGangs_ ;
      /**
       * <code>optional bool contains_gangs = 6;</code>
       * @return Whether the containsGangs field is set.
       */
      @java.lang.Override
      public boolean hasContainsGangs() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional bool contains_gangs = 6;</code>
       * @return The containsGangs.
       */
      @java.lang.Override
      public boolean getContainsGangs() {
        return containsGangs_;
      }
      /**
       * <code>optional bool contains_gangs = 6;</code>
       * @param value The containsGangs to set.
       * @return This builder for chaining.
       */
      public Builder setContainsGangs(boolean value) {
        containsGangs_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional bool contains_gangs = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearContainsGangs() {
        bitField0_ = (bitField0_ & ~0x00000020);
        containsGangs_ = false;
        onChanged();
        return this;
      }

      private long acceptanceTime_ ;
      /**
       * <code>optional int64 acceptance_time = 7;</code>
       * @return Whether the acceptanceTime field is set.
       */
      @java.lang.Override
      public boolean hasAcceptanceTime() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional int64 acceptance_time = 7;</code>
       * @return The acceptanceTime.
       */
      @java.lang.Override
      public long getAcceptanceTime() {
        return acceptanceTime_;
      }
      /**
       * <code>optional int64 acceptance_time = 7;</code>
       * @param value The acceptanceTime to set.
       * @return This builder for chaining.
       */
      public Builder setAcceptanceTime(long value) {
        acceptanceTime_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 acceptance_time = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearAcceptanceTime() {
        bitField0_ = (bitField0_ & ~0x00000040);
        acceptanceTime_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto reservationId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> reservationIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       * @return Whether the reservationId field is set.
       */
      public boolean hasReservationId() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       * @return The reservationId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto getReservationId() {
        if (reservationIdBuilder_ == null) {
          return reservationId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        } else {
          return reservationIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public Builder setReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          reservationId_ = value;
        } else {
          reservationIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public Builder setReservationId(
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder builderForValue) {
        if (reservationIdBuilder_ == null) {
          reservationId_ = builderForValue.build();
        } else {
          reservationIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public Builder mergeReservationId(org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto value) {
        if (reservationIdBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0) &&
            reservationId_ != null &&
            reservationId_ != org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance()) {
            getReservationIdBuilder().mergeFrom(value);
          } else {
            reservationId_ = value;
          }
        } else {
          reservationIdBuilder_.mergeFrom(value);
        }
        if (reservationId_ != null) {
          bitField0_ |= 0x00000080;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public Builder clearReservationId() {
        bitField0_ = (bitField0_ & ~0x00000080);
        reservationId_ = null;
        if (reservationIdBuilder_ != null) {
          reservationIdBuilder_.dispose();
          reservationIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder getReservationIdBuilder() {
        bitField0_ |= 0x00000080;
        onChanged();
        return getReservationIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder getReservationIdOrBuilder() {
        if (reservationIdBuilder_ != null) {
          return reservationIdBuilder_.getMessageOrBuilder();
        } else {
          return reservationId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.getDefaultInstance() : reservationId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ReservationIdProto reservation_id = 8;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder> 
          getReservationIdFieldBuilder() {
        if (reservationIdBuilder_ == null) {
          reservationIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ReservationIdProtoOrBuilder>(
                  getReservationId(),
                  getParentForChildren(),
                  isClean());
          reservationId_ = null;
        }
        return reservationIdBuilder_;
      }
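
      // The singular message field "reservation_id" mirrors the repeated-field
      // pattern: a plain message reference until getReservationIdFieldBuilder()
      // is first invoked, after which a SingleFieldBuilderV3 owns the value and
      // propagates nested edits back to this builder via getParentForChildren().
      // Hedged nested-builder sketch (illustrative only):
      //
      //   ReservationAllocationStateProto.Builder b =
      //       ReservationAllocationStateProto.newBuilder();
      //   b.getReservationIdBuilder();  // switches the field to builder-backed mode
      //   // ... mutate the nested ReservationIdProto.Builder, then call b.build()
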
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ReservationAllocationStateProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ReservationAllocationStateProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ReservationAllocationStateProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ReservationAllocationStateProto>() {
      @java.lang.Override
      public ReservationAllocationStateProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
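
    // PARSER is deprecated as a public field but remains the single parsing
    // entry point: parsePartialFrom converts every failure (including a bare
    // IOException) into InvalidProtocolBufferException and attaches whatever
    // was already decoded via setUnfinishedMessage. Hedged usage sketch:
    //
    //   byte[] bytes = ...;  // a serialized ReservationAllocationStateProto
    //   ReservationAllocationStateProto parsed =
    //       ReservationAllocationStateProto.parser().parseFrom(bytes);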

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ReservationAllocationStateProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ReservationAllocationStateProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ReservationAllocationStateProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface ContainerLaunchContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerLaunchContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> 
        getLocalResourcesList();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index);
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    int getLocalResourcesCount();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
        getLocalResourcesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
        int index);

    /**
     * <code>optional bytes tokens = 2;</code>
     * @return Whether the tokens field is set.
     */
    boolean hasTokens();
    /**
     * <code>optional bytes tokens = 2;</code>
     * @return The tokens.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getTokens();

    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> 
        getServiceDataList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index);
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    int getServiceDataCount();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServiceDataOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getEnvironmentList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    int getEnvironmentCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getEnvironmentOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder(
        int index);

    /**
     * <code>repeated string command = 5;</code>
     * @return A list containing the command.
     */
    java.util.List<java.lang.String>
        getCommandList();
    /**
     * <code>repeated string command = 5;</code>
     * @return The count of command.
     */
    int getCommandCount();
    /**
     * <code>repeated string command = 5;</code>
     * @param index The index of the element to return.
     * @return The command at the given index.
     */
    java.lang.String getCommand(int index);
    /**
     * <code>repeated string command = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the command at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getCommandBytes(int index);

    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> 
        getApplicationACLsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    int getApplicationACLsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getApplicationACLsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     * @return Whether the containerRetryContext field is set.
     */
    boolean hasContainerRetryContext();
    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     * @return The containerRetryContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext();
    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder();

    /**
     * <code>optional bytes tokens_conf = 8;</code>
     * @return Whether the tokensConf field is set.
     */
    boolean hasTokensConf();
    /**
     * <code>optional bytes tokens_conf = 8;</code>
     * @return The tokensConf.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerLaunchContextProto}
   */
  public static final class ContainerLaunchContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerLaunchContextProto)
      ContainerLaunchContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerLaunchContextProto.newBuilder() to construct.
    private ContainerLaunchContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerLaunchContextProto() {
      localResources_ = java.util.Collections.emptyList();
      tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      serviceData_ = java.util.Collections.emptyList();
      environment_ = java.util.Collections.emptyList();
      command_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      applicationACLs_ = java.util.Collections.emptyList();
      tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerLaunchContextProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder.class);
    }

    private int bitField0_;
    public static final int LOCALRESOURCES_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> localResources_;
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> getLocalResourcesList() {
      return localResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
        getLocalResourcesOrBuilderList() {
      return localResources_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    @java.lang.Override
    public int getLocalResourcesCount() {
      return localResources_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) {
      return localResources_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
        int index) {
      return localResources_.get(index);
    }

    public static final int TOKENS_FIELD_NUMBER = 2;
    private org.apache.hadoop.thirdparty.protobuf.ByteString tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes tokens = 2;</code>
     * @return Whether the tokens field is set.
     */
    @java.lang.Override
    public boolean hasTokens() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional bytes tokens = 2;</code>
     * @return The tokens.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getTokens() {
      return tokens_;
    }

    public static final int SERVICE_DATA_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> serviceData_;
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServiceDataList() {
      return serviceData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
        getServiceDataOrBuilderList() {
      return serviceData_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    @java.lang.Override
    public int getServiceDataCount() {
      return serviceData_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index) {
      return serviceData_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder(
        int index) {
      return serviceData_.get(index);
    }

    public static final int ENVIRONMENT_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> environment_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getEnvironmentList() {
      return environment_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getEnvironmentOrBuilderList() {
      return environment_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    @java.lang.Override
    public int getEnvironmentCount() {
      return environment_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index) {
      return environment_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder(
        int index) {
      return environment_.get(index);
    }

    public static final int COMMAND_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList command_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string command = 5;</code>
     * @return A list containing the command.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getCommandList() {
      return command_;
    }
    /**
     * <code>repeated string command = 5;</code>
     * @return The count of command.
     */
    public int getCommandCount() {
      return command_.size();
    }
    /**
     * <code>repeated string command = 5;</code>
     * @param index The index of the element to return.
     * @return The command at the given index.
     */
    public java.lang.String getCommand(int index) {
      return command_.get(index);
    }
    /**
     * <code>repeated string command = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the command at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getCommandBytes(int index) {
      return command_.getByteString(index);
    }
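
    // command_ is a LazyStringArrayList: elements may be held as Strings or as
    // undecoded UTF-8 ByteStrings, getCommandBytes(index) returns the raw bytes
    // without forcing a decode, and getCommand(index) decodes on demand.
    // Illustrative read sketch:
    //
    //   for (int i = 0; i < ctx.getCommandCount(); i++) {   // ctx: a parsed
    //     String arg = ctx.getCommand(i);                    // message instance
    //   }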

    public static final int APPLICATION_ACLS_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> applicationACLs_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getApplicationACLsList() {
      return applicationACLs_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getApplicationACLsOrBuilderList() {
      return applicationACLs_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    @java.lang.Override
    public int getApplicationACLsCount() {
      return applicationACLs_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) {
      return applicationACLs_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
        int index) {
      return applicationACLs_.get(index);
    }

    public static final int CONTAINER_RETRY_CONTEXT_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto containerRetryContext_;
    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     * @return Whether the containerRetryContext field is set.
     */
    @java.lang.Override
    public boolean hasContainerRetryContext() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     * @return The containerRetryContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext() {
      return containerRetryContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder() {
      return containerRetryContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_;
    }

    public static final int TOKENS_CONF_FIELD_NUMBER = 8;
    private org.apache.hadoop.thirdparty.protobuf.ByteString tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes tokens_conf = 8;</code>
     * @return Whether the tokensConf field is set.
     */
    @java.lang.Override
    public boolean hasTokensConf() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional bytes tokens_conf = 8;</code>
     * @return The tokensConf.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf() {
      return tokensConf_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
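
    // memoizedIsInitialized is a tri-state cache: -1 = not yet computed,
    // 0 = known-uninitialized, 1 = known-initialized. This message declares no
    // required fields, so the check trivially succeeds and is cached as 1.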

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      for (int i = 0; i < localResources_.size(); i++) {
        output.writeMessage(1, localResources_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBytes(2, tokens_);
      }
      for (int i = 0; i < serviceData_.size(); i++) {
        output.writeMessage(3, serviceData_.get(i));
      }
      for (int i = 0; i < environment_.size(); i++) {
        output.writeMessage(4, environment_.get(i));
      }
      for (int i = 0; i < command_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, command_.getRaw(i));
      }
      for (int i = 0; i < applicationACLs_.size(); i++) {
        output.writeMessage(6, applicationACLs_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(7, getContainerRetryContext());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeBytes(8, tokensConf_);
      }
      getUnknownFields().writeTo(output);
    }
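
    // writeTo emits fields in ascending field-number order (1..8); optional
    // fields are written only when their presence bit in bitField0_ is set,
    // and unknown fields retained from parsing are appended last. Hedged
    // serialization sketch (writeTo(OutputStream) comes from MessageLite):
    //
    //   try (java.io.OutputStream out = new java.io.FileOutputStream("clc.bin")) {
    //     ctx.writeTo(out);
    //   }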

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      for (int i = 0; i < localResources_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, localResources_.get(i));
      }
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(2, tokens_);
      }
      for (int i = 0; i < serviceData_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, serviceData_.get(i));
      }
      for (int i = 0; i < environment_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, environment_.get(i));
      }
      {
        int dataSize = 0;
        for (int i = 0; i < command_.size(); i++) {
          dataSize += computeStringSizeNoTag(command_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getCommandList().size();
      }
      for (int i = 0; i < applicationACLs_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(6, applicationACLs_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getContainerRetryContext());
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(8, tokensConf_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
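
    // getSerializedSize memoizes its result in memoizedSize (-1 = stale). For
    // the repeated string "command" the block above sums the payload bytes and
    // then adds one tag byte per element: field number 5 with wire type 2
    // encodes as the single byte 0x2A, hence "1 * getCommandList().size()".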

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto) obj;

      if (!getLocalResourcesList()
          .equals(other.getLocalResourcesList())) return false;
      if (hasTokens() != other.hasTokens()) return false;
      if (hasTokens()) {
        if (!getTokens()
            .equals(other.getTokens())) return false;
      }
      if (!getServiceDataList()
          .equals(other.getServiceDataList())) return false;
      if (!getEnvironmentList()
          .equals(other.getEnvironmentList())) return false;
      if (!getCommandList()
          .equals(other.getCommandList())) return false;
      if (!getApplicationACLsList()
          .equals(other.getApplicationACLsList())) return false;
      if (hasContainerRetryContext() != other.hasContainerRetryContext()) return false;
      if (hasContainerRetryContext()) {
        if (!getContainerRetryContext()
            .equals(other.getContainerRetryContext())) return false;
      }
      if (hasTokensConf() != other.hasTokensConf()) return false;
      if (hasTokensConf()) {
        if (!getTokensConf()
            .equals(other.getTokensConf())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (getLocalResourcesCount() > 0) {
        hash = (37 * hash) + LOCALRESOURCES_FIELD_NUMBER;
        hash = (53 * hash) + getLocalResourcesList().hashCode();
      }
      if (hasTokens()) {
        hash = (37 * hash) + TOKENS_FIELD_NUMBER;
        hash = (53 * hash) + getTokens().hashCode();
      }
      if (getServiceDataCount() > 0) {
        hash = (37 * hash) + SERVICE_DATA_FIELD_NUMBER;
        hash = (53 * hash) + getServiceDataList().hashCode();
      }
      if (getEnvironmentCount() > 0) {
        hash = (37 * hash) + ENVIRONMENT_FIELD_NUMBER;
        hash = (53 * hash) + getEnvironmentList().hashCode();
      }
      if (getCommandCount() > 0) {
        hash = (37 * hash) + COMMAND_FIELD_NUMBER;
        hash = (53 * hash) + getCommandList().hashCode();
      }
      if (getApplicationACLsCount() > 0) {
        hash = (37 * hash) + APPLICATION_ACLS_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationACLsList().hashCode();
      }
      if (hasContainerRetryContext()) {
        hash = (37 * hash) + CONTAINER_RETRY_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getContainerRetryContext().hashCode();
      }
      if (hasTokensConf()) {
        hash = (37 * hash) + TOKENS_CONF_FIELD_NUMBER;
        hash = (53 * hash) + getTokensConf().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
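
    // hashCode folds each populated field in with the generated 19/37/53
    // constants and memoizes the result; 0 is the "not yet computed" sentinel,
    // so a hash that legitimately equals 0 would simply be recomputed per call.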

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
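
    // newBuilder()/toBuilder() round-trip sketch (illustrative only; the
    // addCommand accessor is the standard generated adder for the repeated
    // "command" field):
    //
    //   ContainerLaunchContextProto ctx = ContainerLaunchContextProto.newBuilder()
    //       .addCommand("/bin/sh")
    //       .addCommand("-c")
    //       .addCommand("echo hello")
    //       .build();
    //   ContainerLaunchContextProto copy = ctx.toBuilder().build();  // equal copy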

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerLaunchContextProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerLaunchContextProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getLocalResourcesFieldBuilder();
          getServiceDataFieldBuilder();
          getEnvironmentFieldBuilder();
          getApplicationACLsFieldBuilder();
          getContainerRetryContextFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        if (localResourcesBuilder_ == null) {
          localResources_ = java.util.Collections.emptyList();
        } else {
          localResources_ = null;
          localResourcesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        if (serviceDataBuilder_ == null) {
          serviceData_ = java.util.Collections.emptyList();
        } else {
          serviceData_ = null;
          serviceDataBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        if (environmentBuilder_ == null) {
          environment_ = java.util.Collections.emptyList();
        } else {
          environment_ = null;
          environmentBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        command_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        if (applicationACLsBuilder_ == null) {
          applicationACLs_ = java.util.Collections.emptyList();
        } else {
          applicationACLs_ = null;
          applicationACLsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000020);
        containerRetryContext_ = null;
        if (containerRetryContextBuilder_ != null) {
          containerRetryContextBuilder_.dispose();
          containerRetryContextBuilder_ = null;
        }
        tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result) {
        if (localResourcesBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0)) {
            localResources_ = java.util.Collections.unmodifiableList(localResources_);
            bitField0_ = (bitField0_ & ~0x00000001);
          }
          result.localResources_ = localResources_;
        } else {
          result.localResources_ = localResourcesBuilder_.build();
        }
        if (serviceDataBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            serviceData_ = java.util.Collections.unmodifiableList(serviceData_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.serviceData_ = serviceData_;
        } else {
          result.serviceData_ = serviceDataBuilder_.build();
        }
        if (environmentBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            environment_ = java.util.Collections.unmodifiableList(environment_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.environment_ = environment_;
        } else {
          result.environment_ = environmentBuilder_.build();
        }
        if (applicationACLsBuilder_ == null) {
          if (((bitField0_ & 0x00000020) != 0)) {
            applicationACLs_ = java.util.Collections.unmodifiableList(applicationACLs_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.applicationACLs_ = applicationACLs_;
        } else {
          result.applicationACLs_ = applicationACLsBuilder_.build();
        }
      }

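      // Copies the singular fields, translating builder has-bits to message
      // has-bits: 0x02 -> 0x01 (tokens), 0x40 -> 0x02 (container_retry_context),
      // 0x80 -> 0x04 (tokens_conf). command (0x10) keeps no has-bit in the
      // message, so it is only made immutable before being shared.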
      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.tokens_ = tokens_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          command_.makeImmutable();
          result.command_ = command_;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.containerRetryContext_ = containerRetryContextBuilder_ == null
              ? containerRetryContext_
              : containerRetryContextBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.tokensConf_ = tokensConf_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

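      // Type-safe merge with standard protobuf semantics: repeated fields are
      // concatenated onto this builder, set scalar fields (tokens, tokens_conf)
      // overwrite, and the singular message field container_retry_context is
      // merged recursively via mergeContainerRetryContext.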
      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto.getDefaultInstance()) return this;
        if (localResourcesBuilder_ == null) {
          if (!other.localResources_.isEmpty()) {
            if (localResources_.isEmpty()) {
              localResources_ = other.localResources_;
              bitField0_ = (bitField0_ & ~0x00000001);
            } else {
              ensureLocalResourcesIsMutable();
              localResources_.addAll(other.localResources_);
            }
            onChanged();
          }
        } else {
          if (!other.localResources_.isEmpty()) {
            if (localResourcesBuilder_.isEmpty()) {
              localResourcesBuilder_.dispose();
              localResourcesBuilder_ = null;
              localResources_ = other.localResources_;
              bitField0_ = (bitField0_ & ~0x00000001);
              localResourcesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getLocalResourcesFieldBuilder() : null;
            } else {
              localResourcesBuilder_.addAllMessages(other.localResources_);
            }
          }
        }
        if (other.hasTokens()) {
          setTokens(other.getTokens());
        }
        if (serviceDataBuilder_ == null) {
          if (!other.serviceData_.isEmpty()) {
            if (serviceData_.isEmpty()) {
              serviceData_ = other.serviceData_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureServiceDataIsMutable();
              serviceData_.addAll(other.serviceData_);
            }
            onChanged();
          }
        } else {
          if (!other.serviceData_.isEmpty()) {
            if (serviceDataBuilder_.isEmpty()) {
              serviceDataBuilder_.dispose();
              serviceDataBuilder_ = null;
              serviceData_ = other.serviceData_;
              bitField0_ = (bitField0_ & ~0x00000004);
              serviceDataBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getServiceDataFieldBuilder() : null;
            } else {
              serviceDataBuilder_.addAllMessages(other.serviceData_);
            }
          }
        }
        if (environmentBuilder_ == null) {
          if (!other.environment_.isEmpty()) {
            if (environment_.isEmpty()) {
              environment_ = other.environment_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureEnvironmentIsMutable();
              environment_.addAll(other.environment_);
            }
            onChanged();
          }
        } else {
          if (!other.environment_.isEmpty()) {
            if (environmentBuilder_.isEmpty()) {
              environmentBuilder_.dispose();
              environmentBuilder_ = null;
              environment_ = other.environment_;
              bitField0_ = (bitField0_ & ~0x00000008);
              environmentBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getEnvironmentFieldBuilder() : null;
            } else {
              environmentBuilder_.addAllMessages(other.environment_);
            }
          }
        }
        if (!other.command_.isEmpty()) {
          if (command_.isEmpty()) {
            command_ = other.command_;
            bitField0_ |= 0x00000010;
          } else {
            ensureCommandIsMutable();
            command_.addAll(other.command_);
          }
          onChanged();
        }
        if (applicationACLsBuilder_ == null) {
          if (!other.applicationACLs_.isEmpty()) {
            if (applicationACLs_.isEmpty()) {
              applicationACLs_ = other.applicationACLs_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureApplicationACLsIsMutable();
              applicationACLs_.addAll(other.applicationACLs_);
            }
            onChanged();
          }
        } else {
          if (!other.applicationACLs_.isEmpty()) {
            if (applicationACLsBuilder_.isEmpty()) {
              applicationACLsBuilder_.dispose();
              applicationACLsBuilder_ = null;
              applicationACLs_ = other.applicationACLs_;
              bitField0_ = (bitField0_ & ~0x00000020);
              applicationACLsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getApplicationACLsFieldBuilder() : null;
            } else {
              applicationACLsBuilder_.addAllMessages(other.applicationACLs_);
            }
          }
        }
        if (other.hasContainerRetryContext()) {
          mergeContainerRetryContext(other.getContainerRetryContext());
        }
        if (other.hasTokensConf()) {
          setTokensConf(other.getTokensConf());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

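      // Constant because protoc determined nothing reachable from this message
      // can be uninitialized: ContainerLaunchContextProto declares no required
      // fields.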
      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

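      // Streaming parse. Each case label is a wire tag
      // ((field_number << 3) | wire_type): 10 = localResources (field 1),
      // 18 = tokens (2), 26 = service_data (3), 34 = environment (4),
      // 42 = command (5), 50 = application_ACLs (6),
      // 58 = container_retry_context (7), 66 = tokens_conf (8); all are
      // length-delimited (wire type 2), and tag 0 signals end of input.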
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.PARSER,
                        extensionRegistry);
                if (localResourcesBuilder_ == null) {
                  ensureLocalResourcesIsMutable();
                  localResources_.add(m);
                } else {
                  localResourcesBuilder_.addMessage(m);
                }
                break;
              } // case 10
              case 18: {
                tokens_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.PARSER,
                        extensionRegistry);
                if (serviceDataBuilder_ == null) {
                  ensureServiceDataIsMutable();
                  serviceData_.add(m);
                } else {
                  serviceDataBuilder_.addMessage(m);
                }
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (environmentBuilder_ == null) {
                  ensureEnvironmentIsMutable();
                  environment_.add(m);
                } else {
                  environmentBuilder_.addMessage(m);
                }
                break;
              } // case 34
              case 42: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureCommandIsMutable();
                command_.add(bs);
                break;
              } // case 42
              case 50: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.PARSER,
                        extensionRegistry);
                if (applicationACLsBuilder_ == null) {
                  ensureApplicationACLsIsMutable();
                  applicationACLs_.add(m);
                } else {
                  applicationACLsBuilder_.addMessage(m);
                }
                break;
              } // case 50
              case 58: {
                input.readMessage(
                    getContainerRetryContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 66: {
                tokensConf_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

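      // repeated localResources = 1. Lives either in this plain list, guarded
      // by has-bit 0x01 with copy-on-write via ensureLocalResourcesIsMutable(),
      // or, once any builder-returning accessor is used, inside
      // localResourcesBuilder_, which then owns the data (the list is nulled).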
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> localResources_ =
        java.util.Collections.emptyList();
      private void ensureLocalResourcesIsMutable() {
        if (!((bitField0_ & 0x00000001) != 0)) {
          localResources_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto>(localResources_);
          bitField0_ |= 0x00000001;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> localResourcesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> getLocalResourcesList() {
        if (localResourcesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(localResources_);
        } else {
          return localResourcesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public int getLocalResourcesCount() {
        if (localResourcesBuilder_ == null) {
          return localResources_.size();
        } else {
          return localResourcesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getLocalResources(int index) {
        if (localResourcesBuilder_ == null) {
          return localResources_.get(index);
        } else {
          return localResourcesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder setLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.set(index, value);
          onChanged();
        } else {
          localResourcesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder setLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.set(index, builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder addLocalResources(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.add(value);
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder addLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto value) {
        if (localResourcesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLocalResourcesIsMutable();
          localResources_.add(index, value);
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder addLocalResources(
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.add(builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder addLocalResources(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder builderForValue) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.add(index, builderForValue.build());
          onChanged();
        } else {
          localResourcesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder addAllLocalResources(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto> values) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, localResources_);
          onChanged();
        } else {
          localResourcesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder clearLocalResources() {
        if (localResourcesBuilder_ == null) {
          localResources_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000001);
          onChanged();
        } else {
          localResourcesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public Builder removeLocalResources(int index) {
        if (localResourcesBuilder_ == null) {
          ensureLocalResourcesIsMutable();
          localResources_.remove(index);
          onChanged();
        } else {
          localResourcesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder getLocalResourcesBuilder(
          int index) {
        return getLocalResourcesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder getLocalResourcesOrBuilder(
          int index) {
        if (localResourcesBuilder_ == null) {
          return localResources_.get(index);
        } else {
          return localResourcesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
           getLocalResourcesOrBuilderList() {
        if (localResourcesBuilder_ != null) {
          return localResourcesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(localResources_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder() {
        return getLocalResourcesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder addLocalResourcesBuilder(
          int index) {
        return getLocalResourcesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringLocalResourceMapProto localResources = 1;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder> 
           getLocalResourcesBuilderList() {
        return getLocalResourcesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder> 
          getLocalResourcesFieldBuilder() {
        if (localResourcesBuilder_ == null) {
          localResourcesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder>(
                  localResources_,
                  ((bitField0_ & 0x00000001) != 0),
                  getParentForChildren(),
                  isClean());
          localResources_ = null;
        }
        return localResourcesBuilder_;
      }
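
      // Illustrative population of this field (a sketch; the key string, the
      // default LocalResourceProto value, and the usual generated key/value
      // setters on StringLocalResourceMapProto are assumptions):
      //
      //   builder.addLocalResources(
      //       org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto
      //           .newBuilder()
      //           .setKey("app.jar")
      //           .setValue(org.apache.hadoop.yarn.proto.YarnProtos
      //               .LocalResourceProto.getDefaultInstance()));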

      private org.apache.hadoop.thirdparty.protobuf.ByteString tokens_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes tokens = 2;</code>
       * @return Whether the tokens field is set.
       */
      @java.lang.Override
      public boolean hasTokens() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bytes tokens = 2;</code>
       * @return The tokens.
       */
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.ByteString getTokens() {
        return tokens_;
      }
      /**
       * <code>optional bytes tokens = 2;</code>
       * @param value The tokens to set.
       * @return This builder for chaining.
       */
      public Builder setTokens(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        tokens_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes tokens = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTokens() {
        bitField0_ = (bitField0_ & ~0x00000002);
        tokens_ = getDefaultInstance().getTokens();
        onChanged();
        return this;
      }

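      // repeated service_data = 3; same dual-representation plumbing as
      // localResources above, guarded by has-bit 0x04.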
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> serviceData_ =
        java.util.Collections.emptyList();
      private void ensureServiceDataIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          serviceData_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto>(serviceData_);
          bitField0_ |= 0x00000004;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> serviceDataBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> getServiceDataList() {
        if (serviceDataBuilder_ == null) {
          return java.util.Collections.unmodifiableList(serviceData_);
        } else {
          return serviceDataBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public int getServiceDataCount() {
        if (serviceDataBuilder_ == null) {
          return serviceData_.size();
        } else {
          return serviceDataBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getServiceData(int index) {
        if (serviceDataBuilder_ == null) {
          return serviceData_.get(index);
        } else {
          return serviceDataBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder setServiceData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (serviceDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServiceDataIsMutable();
          serviceData_.set(index, value);
          onChanged();
        } else {
          serviceDataBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder setServiceData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (serviceDataBuilder_ == null) {
          ensureServiceDataIsMutable();
          serviceData_.set(index, builderForValue.build());
          onChanged();
        } else {
          serviceDataBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder addServiceData(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (serviceDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServiceDataIsMutable();
          serviceData_.add(value);
          onChanged();
        } else {
          serviceDataBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder addServiceData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto value) {
        if (serviceDataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureServiceDataIsMutable();
          serviceData_.add(index, value);
          onChanged();
        } else {
          serviceDataBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder addServiceData(
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (serviceDataBuilder_ == null) {
          ensureServiceDataIsMutable();
          serviceData_.add(builderForValue.build());
          onChanged();
        } else {
          serviceDataBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder addServiceData(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder builderForValue) {
        if (serviceDataBuilder_ == null) {
          ensureServiceDataIsMutable();
          serviceData_.add(index, builderForValue.build());
          onChanged();
        } else {
          serviceDataBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder addAllServiceData(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto> values) {
        if (serviceDataBuilder_ == null) {
          ensureServiceDataIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, serviceData_);
          onChanged();
        } else {
          serviceDataBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder clearServiceData() {
        if (serviceDataBuilder_ == null) {
          serviceData_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          serviceDataBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public Builder removeServiceData(int index) {
        if (serviceDataBuilder_ == null) {
          ensureServiceDataIsMutable();
          serviceData_.remove(index);
          onChanged();
        } else {
          serviceDataBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder getServiceDataBuilder(
          int index) {
        return getServiceDataFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder getServiceDataOrBuilder(
          int index) {
        if (serviceDataBuilder_ == null) {
          return serviceData_.get(index);
        } else {
          return serviceDataBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
           getServiceDataOrBuilderList() {
        if (serviceDataBuilder_ != null) {
          return serviceDataBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(serviceData_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServiceDataBuilder() {
        return getServiceDataFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder addServiceDataBuilder(
          int index) {
        return getServiceDataFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringBytesMapProto service_data = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder> 
           getServiceDataBuilderList() {
        return getServiceDataFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder> 
          getServiceDataFieldBuilder() {
        if (serviceDataBuilder_ == null) {
          serviceDataBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder>(
                  serviceData_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          serviceData_ = null;
        }
        return serviceDataBuilder_;
      }

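      // repeated environment = 4; same dual-representation plumbing as
      // localResources above, guarded by has-bit 0x08.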
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> environment_ =
        java.util.Collections.emptyList();
      private void ensureEnvironmentIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          environment_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(environment_);
          bitField0_ |= 0x00000008;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> environmentBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getEnvironmentList() {
        if (environmentBuilder_ == null) {
          return java.util.Collections.unmodifiableList(environment_);
        } else {
          return environmentBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public int getEnvironmentCount() {
        if (environmentBuilder_ == null) {
          return environment_.size();
        } else {
          return environmentBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getEnvironment(int index) {
        if (environmentBuilder_ == null) {
          return environment_.get(index);
        } else {
          return environmentBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder setEnvironment(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (environmentBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEnvironmentIsMutable();
          environment_.set(index, value);
          onChanged();
        } else {
          environmentBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder setEnvironment(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (environmentBuilder_ == null) {
          ensureEnvironmentIsMutable();
          environment_.set(index, builderForValue.build());
          onChanged();
        } else {
          environmentBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder addEnvironment(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (environmentBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEnvironmentIsMutable();
          environment_.add(value);
          onChanged();
        } else {
          environmentBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder addEnvironment(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (environmentBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureEnvironmentIsMutable();
          environment_.add(index, value);
          onChanged();
        } else {
          environmentBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder addEnvironment(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (environmentBuilder_ == null) {
          ensureEnvironmentIsMutable();
          environment_.add(builderForValue.build());
          onChanged();
        } else {
          environmentBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder addEnvironment(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (environmentBuilder_ == null) {
          ensureEnvironmentIsMutable();
          environment_.add(index, builderForValue.build());
          onChanged();
        } else {
          environmentBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder addAllEnvironment(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (environmentBuilder_ == null) {
          ensureEnvironmentIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, environment_);
          onChanged();
        } else {
          environmentBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder clearEnvironment() {
        if (environmentBuilder_ == null) {
          environment_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          environmentBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public Builder removeEnvironment(int index) {
        if (environmentBuilder_ == null) {
          ensureEnvironmentIsMutable();
          environment_.remove(index);
          onChanged();
        } else {
          environmentBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getEnvironmentBuilder(
          int index) {
        return getEnvironmentFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getEnvironmentOrBuilder(
          int index) {
        if (environmentBuilder_ == null) {
          return environment_.get(index);
        } else {
          return environmentBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getEnvironmentOrBuilderList() {
        if (environmentBuilder_ != null) {
          return environmentBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(environment_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addEnvironmentBuilder() {
        return getEnvironmentFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addEnvironmentBuilder(
          int index) {
        return getEnvironmentFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto environment = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getEnvironmentBuilderList() {
        return getEnvironmentFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getEnvironmentFieldBuilder() {
        if (environmentBuilder_ == null) {
          environmentBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  environment_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          environment_ = null;
        }
        return environmentBuilder_;
      }

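      // repeated string command = 5 uses LazyStringArrayList rather than the
      // message-list pattern above: mutability is tracked by the list itself
      // (isModifiable()/makeImmutable()) and has-bit 0x10 only records that
      // the field was touched. Illustrative use (values are placeholders):
      //
      //   builder.addCommand("$JAVA_HOME/bin/java")
      //          .addCommand("-Xmx256m");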
      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList command_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureCommandIsMutable() {
        if (!command_.isModifiable()) {
          command_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(command_);
        }
        bitField0_ |= 0x00000010;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @return A list containing the command.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getCommandList() {
        command_.makeImmutable();
        return command_;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @return The count of command.
       */
      public int getCommandCount() {
        return command_.size();
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param index The index of the element to return.
       * @return The command at the given index.
       */
      public java.lang.String getCommand(int index) {
        return command_.get(index);
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param index The index of the value to return.
       * @return The bytes of the command at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getCommandBytes(int index) {
        return command_.getByteString(index);
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param index The index to set the value at.
       * @param value The command to set.
       * @return This builder for chaining.
       */
      public Builder setCommand(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureCommandIsMutable();
        command_.set(index, value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param value The command to add.
       * @return This builder for chaining.
       */
      public Builder addCommand(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureCommandIsMutable();
        command_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param values The command to add.
       * @return This builder for chaining.
       */
      public Builder addAllCommand(
          java.lang.Iterable<java.lang.String> values) {
        ensureCommandIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, command_);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearCommand() {
        command_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string command = 5;</code>
       * @param value The bytes of the command to add.
       * @return This builder for chaining.
       */
      public Builder addCommandBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureCommandIsMutable();
        command_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

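      // repeated application_ACLs = 6; same dual-representation plumbing as
      // localResources above, guarded by has-bit 0x20.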
      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> applicationACLs_ =
        java.util.Collections.emptyList();
      private void ensureApplicationACLsIsMutable() {
        if (!((bitField0_ & 0x00000020) != 0)) {
          applicationACLs_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto>(applicationACLs_);
          bitField0_ |= 0x00000020;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> applicationACLsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getApplicationACLsList() {
        if (applicationACLsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(applicationACLs_);
        } else {
          return applicationACLsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public int getApplicationACLsCount() {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.size();
        } else {
          return applicationACLsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getApplicationACLs(int index) {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.get(index);
        } else {
          return applicationACLsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder setApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.set(index, value);
          onChanged();
        } else {
          applicationACLsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder setApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.set(index, builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder addApplicationACLs(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(value);
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder addApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (applicationACLsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(index, value);
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder addApplicationACLs(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder addApplicationACLs(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.add(index, builderForValue.build());
          onChanged();
        } else {
          applicationACLsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder addAllApplicationACLs(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> values) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, applicationACLs_);
          onChanged();
        } else {
          applicationACLsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder clearApplicationACLs() {
        if (applicationACLsBuilder_ == null) {
          applicationACLs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          applicationACLsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public Builder removeApplicationACLs(int index) {
        if (applicationACLsBuilder_ == null) {
          ensureApplicationACLsIsMutable();
          applicationACLs_.remove(index);
          onChanged();
        } else {
          applicationACLsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder getApplicationACLsBuilder(
          int index) {
        return getApplicationACLsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getApplicationACLsOrBuilder(
          int index) {
        if (applicationACLsBuilder_ == null) {
          return applicationACLs_.get(index);
        } else {
          return applicationACLsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
           getApplicationACLsOrBuilderList() {
        if (applicationACLsBuilder_ != null) {
          return applicationACLsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(applicationACLs_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder() {
        return getApplicationACLsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addApplicationACLsBuilder(
          int index) {
        return getApplicationACLsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto application_ACLs = 6;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder> 
           getApplicationACLsBuilderList() {
        return getApplicationACLsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
          getApplicationACLsFieldBuilder() {
        if (applicationACLsBuilder_ == null) {
          applicationACLsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>(
                  applicationACLs_,
                  ((bitField0_ & 0x00000020) != 0),
                  getParentForChildren(),
                  isClean());
          applicationACLs_ = null;
        }
        return applicationACLsBuilder_;
      }
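      // Usage sketch for the repeated application_ACLs accessors above
      // (illustrative only, not part of the generated API):
      //
      //   ContainerLaunchContextProto.Builder ctx =
      //       ContainerLaunchContextProto.newBuilder();
      //   ctx.addApplicationACLsBuilder();   // appends a default-valued entry
      //   ctx.removeApplicationACLs(0);      // drops it again by index
      //   ctx.clearApplicationACLs();        // empties the list, clearing bit 0x00000020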

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto containerRetryContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder> containerRetryContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       * @return Whether the containerRetryContext field is set.
       */
      public boolean hasContainerRetryContext() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       * @return The containerRetryContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getContainerRetryContext() {
        if (containerRetryContextBuilder_ == null) {
          return containerRetryContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_;
        } else {
          return containerRetryContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public Builder setContainerRetryContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto value) {
        if (containerRetryContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerRetryContext_ = value;
        } else {
          containerRetryContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public Builder setContainerRetryContext(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder builderForValue) {
        if (containerRetryContextBuilder_ == null) {
          containerRetryContext_ = builderForValue.build();
        } else {
          containerRetryContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public Builder mergeContainerRetryContext(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto value) {
        if (containerRetryContextBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            containerRetryContext_ != null &&
            containerRetryContext_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()) {
            getContainerRetryContextBuilder().mergeFrom(value);
          } else {
            containerRetryContext_ = value;
          }
        } else {
          containerRetryContextBuilder_.mergeFrom(value);
        }
        if (containerRetryContext_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public Builder clearContainerRetryContext() {
        bitField0_ = (bitField0_ & ~0x00000040);
        containerRetryContext_ = null;
        if (containerRetryContextBuilder_ != null) {
          containerRetryContextBuilder_.dispose();
          containerRetryContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder getContainerRetryContextBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getContainerRetryContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder getContainerRetryContextOrBuilder() {
        if (containerRetryContextBuilder_ != null) {
          return containerRetryContextBuilder_.getMessageOrBuilder();
        } else {
          return containerRetryContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance() : containerRetryContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryContextProto container_retry_context = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder> 
          getContainerRetryContextFieldBuilder() {
        if (containerRetryContextBuilder_ == null) {
          containerRetryContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder>(
                  getContainerRetryContext(),
                  getParentForChildren(),
                  isClean());
          containerRetryContext_ = null;
        }
        return containerRetryContextBuilder_;
      }
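      // A minimal sketch of the singular-message accessors above (illustrative
      // only; "other" is a hypothetical ContainerRetryContextProto instance):
      //
      //   Builder b = ContainerLaunchContextProto.newBuilder();
      //   b.setContainerRetryContext(
      //       ContainerRetryContextProto.getDefaultInstance());
      //   b.mergeContainerRetryContext(other);  // field-wise merge into the current value
      //   b.clearContainerRetryContext();       // drops the value, clearing bit 0x00000040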

      private org.apache.hadoop.thirdparty.protobuf.ByteString tokensConf_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes tokens_conf = 8;</code>
       * @return Whether the tokensConf field is set.
       */
      @java.lang.Override
      public boolean hasTokensConf() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional bytes tokens_conf = 8;</code>
       * @return The tokensConf.
       */
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.ByteString getTokensConf() {
        return tokensConf_;
      }
      /**
       * <code>optional bytes tokens_conf = 8;</code>
       * @param value The tokensConf to set.
       * @return This builder for chaining.
       */
      public Builder setTokensConf(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        tokensConf_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes tokens_conf = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearTokensConf() {
        bitField0_ = (bitField0_ & ~0x00000080);
        tokensConf_ = getDefaultInstance().getTokensConf();
        onChanged();
        return this;
      }
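      // tokens_conf carries opaque bytes; a population sketch (illustrative only;
      // confBytes is a hypothetical byte[] holding serialized token configuration):
      //
      //   builder.setTokensConf(
      //       org.apache.hadoop.thirdparty.protobuf.ByteString.copyFrom(confBytes));
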
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerLaunchContextProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerLaunchContextProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerLaunchContextProto>() {
      @java.lang.Override
      public ContainerLaunchContextProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerLaunchContextProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerLaunchContextProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
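
  // Illustrative round trip for the ContainerLaunchContextProto message above
  // (a sketch, not part of the generated API surface):
  //
  //   byte[] wire = ContainerLaunchContextProto.newBuilder().build().toByteArray();
  //   ContainerLaunchContextProto parsed =
  //       ContainerLaunchContextProto.parseFrom(wire);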

  public interface ContainerStatusProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerStatusProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    boolean hasContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId();
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
     * @return Whether the state field is set.
     */
    boolean hasState();
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
     * @return The state.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState();

    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    boolean hasDiagnostics();
    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    java.lang.String getDiagnostics();
    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes();

    /**
     * <code>optional int32 exit_status = 4 [default = -1000];</code>
     * @return Whether the exitStatus field is set.
     */
    boolean hasExitStatus();
    /**
     * <code>optional int32 exit_status = 4 [default = -1000];</code>
     * @return The exitStatus.
     */
    int getExitStatus();

    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return Whether the capability field is set.
     */
    boolean hasCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return The capability.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability();
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder();

    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    boolean hasExecutionType();
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType();

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getContainerAttributesList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    int getContainerAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getContainerAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
     * @return Whether the containerSubState field is set.
     */
    boolean hasContainerSubState();
    /**
     * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
     * @return The containerSubState.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ContainerStatusProto}
   */
  public static final class ContainerStatusProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerStatusProto)
      ContainerStatusProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerStatusProto.newBuilder() to construct.
    private ContainerStatusProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerStatusProto() {
      state_ = 1;
      diagnostics_ = "N/A";
      exitStatus_ = -1000;
      executionType_ = 1;
      containerAttributes_ = java.util.Collections.emptyList();
      containerSubState_ = 1;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerStatusProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder.class);
    }

    private int bitField0_;
    public static final int CONTAINER_ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return Whether the containerId field is set.
     */
    @java.lang.Override
    public boolean hasContainerId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     * @return The containerId.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }
    /**
     * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
      return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
    }

    public static final int STATE_FIELD_NUMBER = 2;
    private int state_ = 1;
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
     * @return Whether the state field is set.
     */
    @java.lang.Override public boolean hasState() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
     * @return The state.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState() {
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(state_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
    }

    public static final int DIAGNOSTICS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object diagnostics_ = "N/A";
    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return Whether the diagnostics field is set.
     */
    @java.lang.Override
    public boolean hasDiagnostics() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return The diagnostics.
     */
    @java.lang.Override
    public java.lang.String getDiagnostics() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          diagnostics_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string diagnostics = 3 [default = "N/A"];</code>
     * @return The bytes for diagnostics.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDiagnosticsBytes() {
      java.lang.Object ref = diagnostics_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        diagnostics_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int EXIT_STATUS_FIELD_NUMBER = 4;
    private int exitStatus_ = -1000;
    /**
     * <code>optional int32 exit_status = 4 [default = -1000];</code>
     * @return Whether the exitStatus field is set.
     */
    @java.lang.Override
    public boolean hasExitStatus() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int32 exit_status = 4 [default = -1000];</code>
     * @return The exitStatus.
     */
    @java.lang.Override
    public int getExitStatus() {
      return exitStatus_;
    }

    public static final int CAPABILITY_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return Whether the capability field is set.
     */
    @java.lang.Override
    public boolean hasCapability() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     * @return The capability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }
    /**
     * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
      return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
    }

    public static final int EXECUTIONTYPE_FIELD_NUMBER = 6;
    private int executionType_ = 1;
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
     * @return Whether the executionType field is set.
     */
    @java.lang.Override public boolean hasExecutionType() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
     * @return The executionType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
      org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
    }

    public static final int CONTAINER_ATTRIBUTES_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> containerAttributes_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getContainerAttributesList() {
      return containerAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getContainerAttributesOrBuilderList() {
      return containerAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    @java.lang.Override
    public int getContainerAttributesCount() {
      return containerAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index) {
      return containerAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder(
        int index) {
      return containerAttributes_.get(index);
    }

    public static final int CONTAINER_SUB_STATE_FIELD_NUMBER = 8;
    private int containerSubState_ = 1;
    /**
     * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
     * @return Whether the containerSubState field is set.
     */
    @java.lang.Override public boolean hasContainerSubState() {
      return ((bitField0_ & 0x00000040) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
     * @return The containerSubState.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState() {
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(containerSubState_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.CSS_SCHEDULED : result;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasCapability()) {
        if (!getCapability().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
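    // Note: the generated check recurses only into capability, which indicates
    // that ResourceProto (directly or transitively) declares required fields;
    // all other fields here are presence-tracked but never validated recursively.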

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, state_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, diagnostics_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt32(4, exitStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeMessage(5, getCapability());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeEnum(6, executionType_);
      }
      for (int i = 0; i < containerAttributes_.size(); i++) {
        output.writeMessage(7, containerAttributes_.get(i));
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        output.writeEnum(8, containerSubState_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getContainerId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, state_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, diagnostics_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, exitStatus_);
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getCapability());
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(6, executionType_);
      }
      for (int i = 0; i < containerAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, containerAttributes_.get(i));
      }
      if (((bitField0_ & 0x00000040) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(8, containerSubState_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto) obj;

      if (hasContainerId() != other.hasContainerId()) return false;
      if (hasContainerId()) {
        if (!getContainerId()
            .equals(other.getContainerId())) return false;
      }
      if (hasState() != other.hasState()) return false;
      if (hasState()) {
        if (state_ != other.state_) return false;
      }
      if (hasDiagnostics() != other.hasDiagnostics()) return false;
      if (hasDiagnostics()) {
        if (!getDiagnostics()
            .equals(other.getDiagnostics())) return false;
      }
      if (hasExitStatus() != other.hasExitStatus()) return false;
      if (hasExitStatus()) {
        if (getExitStatus()
            != other.getExitStatus()) return false;
      }
      if (hasCapability() != other.hasCapability()) return false;
      if (hasCapability()) {
        if (!getCapability()
            .equals(other.getCapability())) return false;
      }
      if (hasExecutionType() != other.hasExecutionType()) return false;
      if (hasExecutionType()) {
        if (executionType_ != other.executionType_) return false;
      }
      if (!getContainerAttributesList()
          .equals(other.getContainerAttributesList())) return false;
      if (hasContainerSubState() != other.hasContainerSubState()) return false;
      if (hasContainerSubState()) {
        if (containerSubState_ != other.containerSubState_) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasContainerId()) {
        hash = (37 * hash) + CONTAINER_ID_FIELD_NUMBER;
        hash = (53 * hash) + getContainerId().hashCode();
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + state_;
      }
      if (hasDiagnostics()) {
        hash = (37 * hash) + DIAGNOSTICS_FIELD_NUMBER;
        hash = (53 * hash) + getDiagnostics().hashCode();
      }
      if (hasExitStatus()) {
        hash = (37 * hash) + EXIT_STATUS_FIELD_NUMBER;
        hash = (53 * hash) + getExitStatus();
      }
      if (hasCapability()) {
        hash = (37 * hash) + CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getCapability().hashCode();
      }
      if (hasExecutionType()) {
        hash = (37 * hash) + EXECUTIONTYPE_FIELD_NUMBER;
        hash = (53 * hash) + executionType_;
      }
      if (getContainerAttributesCount() > 0) {
        hash = (37 * hash) + CONTAINER_ATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getContainerAttributesList().hashCode();
      }
      if (hasContainerSubState()) {
        hash = (37 * hash) + CONTAINER_SUB_STATE_FIELD_NUMBER;
        hash = (53 * hash) + containerSubState_;
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }
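
    // Parsing sketch (illustrative; "wire" is a hypothetical byte[] produced by
    // toByteArray() on another ContainerStatusProto):
    //
    //   ContainerStatusProto status = ContainerStatusProto.parseFrom(wire);
    //   if (status.hasExitStatus() && status.getExitStatus() != -1000) {
    //     // the container finished with a real exit code
    //   }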

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
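
    // toBuilder()/newBuilder(prototype) copy every set field, so the idiomatic
    // "change one field" pattern is (sketch; the diagnostic text is hypothetical):
    //
    //   ContainerStatusProto updated = original.toBuilder()
    //       .setDiagnostics("restarted by NM")
    //       .build();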

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerStatusProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerStatusProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getContainerIdFieldBuilder();
          getCapabilityFieldBuilder();
          getContainerAttributesFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        state_ = 1;
        diagnostics_ = "N/A";
        exitStatus_ = -1000;
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        executionType_ = 1;
        if (containerAttributesBuilder_ == null) {
          containerAttributes_ = java.util.Collections.emptyList();
        } else {
          containerAttributes_ = null;
          containerAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        containerSubState_ = 1;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerStatusProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result) {
        if (containerAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0)) {
            containerAttributes_ = java.util.Collections.unmodifiableList(containerAttributes_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.containerAttributes_ = containerAttributes_;
        } else {
          result.containerAttributes_ = containerAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.containerId_ = containerIdBuilder_ == null
              ? containerId_
              : containerIdBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.state_ = state_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.diagnostics_ = diagnostics_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.exitStatus_ = exitStatus_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.capability_ = capabilityBuilder_ == null
              ? capability_
              : capabilityBuilder_.build();
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.executionType_ = executionType_;
          to_bitField0_ |= 0x00000020;
        }
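        // Builder bit 0x00000040 is owned by the repeated container_attributes
        // list (handled in buildPartialRepeatedFields above), so container_sub_state
        // uses builder bit 0x00000080 while mapping to message bit 0x00000040.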
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.containerSubState_ = containerSubState_;
          to_bitField0_ |= 0x00000040;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto.getDefaultInstance()) return this;
        if (other.hasContainerId()) {
          mergeContainerId(other.getContainerId());
        }
        if (other.hasState()) {
          setState(other.getState());
        }
        if (other.hasDiagnostics()) {
          diagnostics_ = other.diagnostics_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasExitStatus()) {
          setExitStatus(other.getExitStatus());
        }
        if (other.hasCapability()) {
          mergeCapability(other.getCapability());
        }
        if (other.hasExecutionType()) {
          setExecutionType(other.getExecutionType());
        }
        if (containerAttributesBuilder_ == null) {
          if (!other.containerAttributes_.isEmpty()) {
            if (containerAttributes_.isEmpty()) {
              containerAttributes_ = other.containerAttributes_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureContainerAttributesIsMutable();
              containerAttributes_.addAll(other.containerAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.containerAttributes_.isEmpty()) {
            if (containerAttributesBuilder_.isEmpty()) {
              containerAttributesBuilder_.dispose();
              containerAttributesBuilder_ = null;
              containerAttributes_ = other.containerAttributes_;
              bitField0_ = (bitField0_ & ~0x00000040);
              containerAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getContainerAttributesFieldBuilder() : null;
            } else {
              containerAttributesBuilder_.addAllMessages(other.containerAttributes_);
            }
          }
        }
        if (other.hasContainerSubState()) {
          setContainerSubState(other.getContainerSubState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasCapability()) {
          if (!getCapability().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
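            // Each tag is (field_number << 3) | wire_type: e.g. 10 is field 1,
            // length-delimited; 16 is field 2, varint; 0 signals end of input.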
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getContainerIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  state_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              case 26: {
                diagnostics_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                exitStatus_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 42: {
                input.readMessage(
                    getCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(6, tmpRaw);
                } else {
                  executionType_ = tmpRaw;
                  bitField0_ |= 0x00000020;
                }
                break;
              } // case 48
              case 58: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (containerAttributesBuilder_ == null) {
                  ensureContainerAttributesIsMutable();
                  containerAttributes_.add(m);
                } else {
                  containerAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 58
              case 64: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(8, tmpRaw);
                } else {
                  containerSubState_ = tmpRaw;
                  bitField0_ |= 0x00000080;
                }
                break;
              } // case 64
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto containerId_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> containerIdBuilder_;
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return Whether the containerId field is set.
       */
      public boolean hasContainerId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       * @return The containerId.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto getContainerId() {
        if (containerIdBuilder_ == null) {
          return containerId_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        } else {
          return containerIdBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          containerId_ = value;
        } else {
          containerIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder setContainerId(
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder builderForValue) {
        if (containerIdBuilder_ == null) {
          containerId_ = builderForValue.build();
        } else {
          containerIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder mergeContainerId(org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto value) {
        if (containerIdBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            containerId_ != null &&
            containerId_ != org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance()) {
            getContainerIdBuilder().mergeFrom(value);
          } else {
            containerId_ = value;
          }
        } else {
          containerIdBuilder_.mergeFrom(value);
        }
        if (containerId_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public Builder clearContainerId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        containerId_ = null;
        if (containerIdBuilder_ != null) {
          containerIdBuilder_.dispose();
          containerIdBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder getContainerIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getContainerIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder getContainerIdOrBuilder() {
        if (containerIdBuilder_ != null) {
          return containerIdBuilder_.getMessageOrBuilder();
        } else {
          return containerId_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.getDefaultInstance() : containerId_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ContainerIdProto container_id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder> 
          getContainerIdFieldBuilder() {
        if (containerIdBuilder_ == null) {
          containerIdBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ContainerIdProtoOrBuilder>(
                  getContainerId(),
                  getParentForChildren(),
                  isClean());
          containerId_ = null;
        }
        return containerIdBuilder_;
      }

      private int state_ = 1;
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
       * @return Whether the state field is set.
       */
      @java.lang.Override public boolean hasState() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
       * @return The state.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto getState() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.forNumber(state_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto.C_NEW : result;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
       * @param value The state to set.
       * @return This builder for chaining.
       */
      public Builder setState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        state_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerStateProto state = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000002);
        state_ = 1;
        onChanged();
        return this;
      }
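      // The enum is stored as its raw wire number (state_); forNumber()
      // returns null for numbers this runtime does not recognize, in which
      // case getState() falls back to C_NEW, matching the initializer
      // state_ = 1.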

      private java.lang.Object diagnostics_ = "N/A";
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @return Whether the diagnostics field is set.
       */
      public boolean hasDiagnostics() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @return The diagnostics.
       */
      public java.lang.String getDiagnostics() {
        java.lang.Object ref = diagnostics_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            diagnostics_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @return The bytes for diagnostics.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDiagnosticsBytes() {
        java.lang.Object ref = diagnostics_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          diagnostics_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @param value The diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnostics(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @return This builder for chaining.
       */
      public Builder clearDiagnostics() {
        diagnostics_ = getDefaultInstance().getDiagnostics();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string diagnostics = 3 [default = "N/A"];</code>
       * @param value The bytes for diagnostics to set.
       * @return This builder for chaining.
       */
      public Builder setDiagnosticsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        diagnostics_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
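      // diagnostics_ holds either a String or a ByteString. The getters above
      // convert lazily and cache the converted form (the decoded String is
      // cached only when the bytes are valid UTF-8), so repeated reads do not
      // re-run the conversion.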

      private int exitStatus_ = -1000;
      /**
       * <code>optional int32 exit_status = 4 [default = -1000];</code>
       * @return Whether the exitStatus field is set.
       */
      @java.lang.Override
      public boolean hasExitStatus() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 exit_status = 4 [default = -1000];</code>
       * @return The exitStatus.
       */
      @java.lang.Override
      public int getExitStatus() {
        return exitStatus_;
      }
      /**
       * <code>optional int32 exit_status = 4 [default = -1000];</code>
       * @param value The exitStatus to set.
       * @return This builder for chaining.
       */
      public Builder setExitStatus(int value) {
        exitStatus_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 exit_status = 4 [default = -1000];</code>
       * @return This builder for chaining.
       */
      public Builder clearExitStatus() {
        bitField0_ = (bitField0_ & ~0x00000008);
        exitStatus_ = -1000;
        onChanged();
        return this;
      }
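      // -1000 is the proto-declared default and doubles as a sentinel for
      // "no exit status reported" (it matches ContainerExitStatus.INVALID in
      // the YARN API); clearExitStatus() restores the sentinel rather than 0,
      // so an unset status is never mistaken for a clean exit.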

      private org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto capability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> capabilityBuilder_;
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       * @return Whether the capability field is set.
       */
      public boolean hasCapability() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       * @return The capability.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto getCapability() {
        if (capabilityBuilder_ == null) {
          return capability_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        } else {
          return capabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder setCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          capability_ = value;
        } else {
          capabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder setCapability(
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder builderForValue) {
        if (capabilityBuilder_ == null) {
          capability_ = builderForValue.build();
        } else {
          capabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder mergeCapability(org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto value) {
        if (capabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            capability_ != null &&
            capability_ != org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance()) {
            getCapabilityBuilder().mergeFrom(value);
          } else {
            capability_ = value;
          }
        } else {
          capabilityBuilder_.mergeFrom(value);
        }
        if (capability_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public Builder clearCapability() {
        bitField0_ = (bitField0_ & ~0x00000010);
        capability_ = null;
        if (capabilityBuilder_ != null) {
          capabilityBuilder_.dispose();
          capabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder getCapabilityBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder getCapabilityOrBuilder() {
        if (capabilityBuilder_ != null) {
          return capabilityBuilder_.getMessageOrBuilder();
        } else {
          return capability_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.getDefaultInstance() : capability_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ResourceProto capability = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder> 
          getCapabilityFieldBuilder() {
        if (capabilityBuilder_ == null) {
          capabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ResourceProtoOrBuilder>(
                  getCapability(),
                  getParentForChildren(),
                  isClean());
          capability_ = null;
        }
        return capabilityBuilder_;
      }
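      // Illustrative use of the nested builder (hypothetical values; assumes
      // ResourceProto's generated setters setMemory(long) and
      // setVirtualCores(int) from yarn_protos.proto):
      //
      //   ContainerStatusProto.Builder status = ContainerStatusProto.newBuilder();
      //   status.getCapabilityBuilder()
      //       .setMemory(1024)
      //       .setVirtualCores(1);   // getCapabilityBuilder() also sets the has-bit
      //   ContainerStatusProto built = status.build();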

      private int executionType_ = 1;
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
       * @return Whether the executionType field is set.
       */
      @java.lang.Override public boolean hasExecutionType() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
       * @return The executionType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto getExecutionType() {
        org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto result = org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.forNumber(executionType_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto.GUARANTEED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
       * @param value The executionType to set.
       * @return This builder for chaining.
       */
      public Builder setExecutionType(org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000020;
        executionType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ExecutionTypeProto executionType = 6 [default = GUARANTEED];</code>
       * @return This builder for chaining.
       */
      public Builder clearExecutionType() {
        bitField0_ = (bitField0_ & ~0x00000020);
        executionType_ = 1;
        onChanged();
        return this;
      }
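      // executionType_ starts at 1 because the field declares
      // [default = GUARANTEED] and GUARANTEED's wire number is 1; clearing
      // therefore restores 1, not 0.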

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> containerAttributes_ =
        java.util.Collections.emptyList();
      private void ensureContainerAttributesIsMutable() {
        if (!((bitField0_ & 0x00000040) != 0)) {
          containerAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(containerAttributes_);
          bitField0_ |= 0x00000040;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> containerAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getContainerAttributesList() {
        if (containerAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(containerAttributes_);
        } else {
          return containerAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public int getContainerAttributesCount() {
        if (containerAttributesBuilder_ == null) {
          return containerAttributes_.size();
        } else {
          return containerAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getContainerAttributes(int index) {
        if (containerAttributesBuilder_ == null) {
          return containerAttributes_.get(index);
        } else {
          return containerAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder setContainerAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (containerAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerAttributesIsMutable();
          containerAttributes_.set(index, value);
          onChanged();
        } else {
          containerAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder setContainerAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (containerAttributesBuilder_ == null) {
          ensureContainerAttributesIsMutable();
          containerAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          containerAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder addContainerAttributes(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (containerAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerAttributesIsMutable();
          containerAttributes_.add(value);
          onChanged();
        } else {
          containerAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder addContainerAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (containerAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureContainerAttributesIsMutable();
          containerAttributes_.add(index, value);
          onChanged();
        } else {
          containerAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder addContainerAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (containerAttributesBuilder_ == null) {
          ensureContainerAttributesIsMutable();
          containerAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          containerAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder addContainerAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (containerAttributesBuilder_ == null) {
          ensureContainerAttributesIsMutable();
          containerAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          containerAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder addAllContainerAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (containerAttributesBuilder_ == null) {
          ensureContainerAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, containerAttributes_);
          onChanged();
        } else {
          containerAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder clearContainerAttributes() {
        if (containerAttributesBuilder_ == null) {
          containerAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
        } else {
          containerAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public Builder removeContainerAttributes(int index) {
        if (containerAttributesBuilder_ == null) {
          ensureContainerAttributesIsMutable();
          containerAttributes_.remove(index);
          onChanged();
        } else {
          containerAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getContainerAttributesBuilder(
          int index) {
        return getContainerAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getContainerAttributesOrBuilder(
          int index) {
        if (containerAttributesBuilder_ == null) {
          return containerAttributes_.get(index);
        } else {
          return containerAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getContainerAttributesOrBuilderList() {
        if (containerAttributesBuilder_ != null) {
          return containerAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(containerAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addContainerAttributesBuilder() {
        return getContainerAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addContainerAttributesBuilder(
          int index) {
        return getContainerAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto container_attributes = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getContainerAttributesBuilderList() {
        return getContainerAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getContainerAttributesFieldBuilder() {
        if (containerAttributesBuilder_ == null) {
          containerAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  containerAttributes_,
                  ((bitField0_ & 0x00000040) != 0),
                  getParentForChildren(),
                  isClean());
          containerAttributes_ = null;
        }
        return containerAttributesBuilder_;
      }
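      // Repeated-field pattern: until the RepeatedFieldBuilderV3 exists,
      // containerAttributes_ is a plain list guarded by bit 0x00000040 and
      // copied on first mutation (ensureContainerAttributesIsMutable). Once
      // getContainerAttributesFieldBuilder() runs, all access is routed
      // through the builder and the list reference is dropped.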

      private int containerSubState_ = 1;
      /**
       * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
       * @return Whether the containerSubState field is set.
       */
      @java.lang.Override public boolean hasContainerSubState() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
       * @return The containerSubState.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto getContainerSubState() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.forNumber(containerSubState_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto.CSS_SCHEDULED : result;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
       * @param value The containerSubState to set.
       * @return This builder for chaining.
       */
      public Builder setContainerSubState(org.apache.hadoop.yarn.proto.YarnProtos.ContainerSubStateProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000080;
        containerSubState_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerSubStateProto container_sub_state = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearContainerSubState() {
        bitField0_ = (bitField0_ & ~0x00000080);
        containerSubState_ = 1;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerStatusProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerStatusProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerStatusProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerStatusProto>() {
      @java.lang.Override
      public ContainerStatusProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
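    // The public PARSER constant is deprecated in favor of parser().
    // parsePartialFrom() converts stream failures into
    // InvalidProtocolBufferException and attaches whatever was decoded before
    // the failure via setUnfinishedMessage().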

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerStatusProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerStatusProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
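  // Illustrative round trip for ContainerStatusProto (hypothetical values;
  // C_COMPLETE is assumed to be a ContainerStateProto constant, and
  // toByteArray()/parseFrom(byte[]) are the standard generated entry points):
  //
  //   ContainerStatusProto status = ContainerStatusProto.newBuilder()
  //       .setState(ContainerStateProto.C_COMPLETE)
  //       .setDiagnostics("finished")
  //       .setExitStatus(0)
  //       .build();
  //   byte[] wire = status.toByteArray();
  //   ContainerStatusProto decoded = ContainerStatusProto.parseFrom(wire);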

  public interface ContainerRetryContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerRetryContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
     * @return Whether the retryPolicy field is set.
     */
    boolean hasRetryPolicy();
    /**
     * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
     * @return The retryPolicy.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy();

    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @return A list containing the errorCodes.
     */
    java.util.List<java.lang.Integer> getErrorCodesList();
    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @return The count of errorCodes.
     */
    int getErrorCodesCount();
    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @param index The index of the element to return.
     * @return The errorCodes at the given index.
     */
    int getErrorCodes(int index);

    /**
     * <code>optional int32 max_retries = 3 [default = 0];</code>
     * @return Whether the maxRetries field is set.
     */
    boolean hasMaxRetries();
    /**
     * <code>optional int32 max_retries = 3 [default = 0];</code>
     * @return The maxRetries.
     */
    int getMaxRetries();

    /**
     * <code>optional int32 retry_interval = 4 [default = 0];</code>
     * @return Whether the retryInterval field is set.
     */
    boolean hasRetryInterval();
    /**
     * <code>optional int32 retry_interval = 4 [default = 0];</code>
     * @return The retryInterval.
     */
    int getRetryInterval();

    /**
     * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
     * @return Whether the failuresValidityInterval field is set.
     */
    boolean hasFailuresValidityInterval();
    /**
     * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
     * @return The failuresValidityInterval.
     */
    long getFailuresValidityInterval();
  }
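  // Field semantics, as documented for YARN's ContainerRetryContext (not
  // enforced by this proto): error_codes is consulted only when retry_policy
  // is RETRY_ON_SPECIFIC_ERROR_CODES, retry_interval is in milliseconds, and
  // failures_validity_interval = -1 means container failures are counted
  // forever rather than within a sliding window.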
  /**
   * Protobuf type {@code hadoop.yarn.ContainerRetryContextProto}
   */
  public static final class ContainerRetryContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerRetryContextProto)
      ContainerRetryContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerRetryContextProto.newBuilder() to construct.
    private ContainerRetryContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerRetryContextProto() {
      retryPolicy_ = 0;
      errorCodes_ = emptyIntList();
      failuresValidityInterval_ = -1L;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerRetryContextProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder.class);
    }

    private int bitField0_;
    public static final int RETRY_POLICY_FIELD_NUMBER = 1;
    private int retryPolicy_ = 0;
    /**
     * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
     * @return Whether the retryPolicy field is set.
     */
    @java.lang.Override public boolean hasRetryPolicy() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
     * @return The retryPolicy.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy() {
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(retryPolicy_);
      return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.NEVER_RETRY : result;
    }

    public static final int ERROR_CODES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.Internal.IntList errorCodes_ =
        emptyIntList();
    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @return A list containing the errorCodes.
     */
    @java.lang.Override
    public java.util.List<java.lang.Integer>
        getErrorCodesList() {
      return errorCodes_;
    }
    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @return The count of errorCodes.
     */
    public int getErrorCodesCount() {
      return errorCodes_.size();
    }
    /**
     * <code>repeated int32 error_codes = 2;</code>
     * @param index The index of the element to return.
     * @return The errorCodes at the given index.
     */
    public int getErrorCodes(int index) {
      return errorCodes_.getInt(index);
    }

    public static final int MAX_RETRIES_FIELD_NUMBER = 3;
    private int maxRetries_ = 0;
    /**
     * <code>optional int32 max_retries = 3 [default = 0];</code>
     * @return Whether the maxRetries field is set.
     */
    @java.lang.Override
    public boolean hasMaxRetries() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int32 max_retries = 3 [default = 0];</code>
     * @return The maxRetries.
     */
    @java.lang.Override
    public int getMaxRetries() {
      return maxRetries_;
    }

    public static final int RETRY_INTERVAL_FIELD_NUMBER = 4;
    private int retryInterval_ = 0;
    /**
     * <code>optional int32 retry_interval = 4 [default = 0];</code>
     * @return Whether the retryInterval field is set.
     */
    @java.lang.Override
    public boolean hasRetryInterval() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int32 retry_interval = 4 [default = 0];</code>
     * @return The retryInterval.
     */
    @java.lang.Override
    public int getRetryInterval() {
      return retryInterval_;
    }

    public static final int FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER = 5;
    private long failuresValidityInterval_ = -1L;
    /**
     * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
     * @return Whether the failuresValidityInterval field is set.
     */
    @java.lang.Override
    public boolean hasFailuresValidityInterval() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
     * @return The failuresValidityInterval.
     */
    @java.lang.Override
    public long getFailuresValidityInterval() {
      return failuresValidityInterval_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }
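    // memoizedIsInitialized caches the result: -1 = not yet computed,
    // 1 = true, 0 = false. This message declares no required fields, so the
    // check trivially succeeds on first call and is cached thereafter.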

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, retryPolicy_);
      }
      for (int i = 0; i < errorCodes_.size(); i++) {
        output.writeInt32(2, errorCodes_.getInt(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt32(3, maxRetries_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt32(4, retryInterval_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(5, failuresValidityInterval_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, retryPolicy_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < errorCodes_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeInt32SizeNoTag(errorCodes_.getInt(i));
        }
        size += dataSize;
        size += 1 * getErrorCodesList().size();
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(3, maxRetries_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(4, retryInterval_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(5, failuresValidityInterval_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
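    // error_codes is serialized unpacked (the proto2 default for repeated
    // scalars): one tag byte per element, which is why the sizing block adds
    // 1 * getErrorCodesList().size() on top of the varint payload sizes.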

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto other = (org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto) obj;

      if (hasRetryPolicy() != other.hasRetryPolicy()) return false;
      if (hasRetryPolicy()) {
        if (retryPolicy_ != other.retryPolicy_) return false;
      }
      if (!getErrorCodesList()
          .equals(other.getErrorCodesList())) return false;
      if (hasMaxRetries() != other.hasMaxRetries()) return false;
      if (hasMaxRetries()) {
        if (getMaxRetries()
            != other.getMaxRetries()) return false;
      }
      if (hasRetryInterval() != other.hasRetryInterval()) return false;
      if (hasRetryInterval()) {
        if (getRetryInterval()
            != other.getRetryInterval()) return false;
      }
      if (hasFailuresValidityInterval() != other.hasFailuresValidityInterval()) return false;
      if (hasFailuresValidityInterval()) {
        if (getFailuresValidityInterval()
            != other.getFailuresValidityInterval()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasRetryPolicy()) {
        hash = (37 * hash) + RETRY_POLICY_FIELD_NUMBER;
        hash = (53 * hash) + retryPolicy_;
      }
      if (getErrorCodesCount() > 0) {
        hash = (37 * hash) + ERROR_CODES_FIELD_NUMBER;
        hash = (53 * hash) + getErrorCodesList().hashCode();
      }
      if (hasMaxRetries()) {
        hash = (37 * hash) + MAX_RETRIES_FIELD_NUMBER;
        hash = (53 * hash) + getMaxRetries();
      }
      if (hasRetryInterval()) {
        hash = (37 * hash) + RETRY_INTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + getRetryInterval();
      }
      if (hasFailuresValidityInterval()) {
        hash = (37 * hash) + FAILURES_VALIDITY_INTERVAL_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFailuresValidityInterval());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerRetryContextProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerRetryContextProto)
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.class, org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        retryPolicy_ = 0;
        errorCodes_ = emptyIntList();
        maxRetries_ = 0;
        retryInterval_ = 0;
        failuresValidityInterval_ = -1L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.retryPolicy_ = retryPolicy_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          errorCodes_.makeImmutable();
          result.errorCodes_ = errorCodes_;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.maxRetries_ = maxRetries_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.retryInterval_ = retryInterval_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.failuresValidityInterval_ = failuresValidityInterval_;
          to_bitField0_ |= 0x00000008;
        }
        result.bitField0_ |= to_bitField0_;
      }
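      // buildPartial0() remaps builder presence bits to message presence
      // bits: error_codes occupies builder bit 0x00000002 but, as a repeated
      // field, has no has-bit in the message, so the message bits for the
      // later singular fields shift down by one position.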

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto.getDefaultInstance()) return this;
        if (other.hasRetryPolicy()) {
          setRetryPolicy(other.getRetryPolicy());
        }
        if (!other.errorCodes_.isEmpty()) {
          if (errorCodes_.isEmpty()) {
            errorCodes_ = other.errorCodes_;
            errorCodes_.makeImmutable();
            bitField0_ |= 0x00000002;
          } else {
            ensureErrorCodesIsMutable();
            errorCodes_.addAll(other.errorCodes_);
          }
          onChanged();
        }
        if (other.hasMaxRetries()) {
          setMaxRetries(other.getMaxRetries());
        }
        if (other.hasRetryInterval()) {
          setRetryInterval(other.getRetryInterval());
        }
        if (other.hasFailuresValidityInterval()) {
          setFailuresValidityInterval(other.getFailuresValidityInterval());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto tmpValue =
                    org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  retryPolicy_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 16: {
                int v = input.readInt32();
                ensureErrorCodesIsMutable();
                errorCodes_.addInt(v);
                break;
              } // case 16
              case 18: {
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                ensureErrorCodesIsMutable();
                while (input.getBytesUntilLimit() > 0) {
                  errorCodes_.addInt(input.readInt32());
                }
                input.popLimit(limit);
                break;
              } // case 18
              case 24: {
                maxRetries_ = input.readInt32();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              case 32: {
                retryInterval_ = input.readInt32();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 40: {
                failuresValidityInterval_ = input.readInt64();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
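      // The parser accepts error_codes in both encodings: tag 16 (field 2,
      // varint) for unpacked elements and tag 18 (field 2, length-delimited)
      // for a packed run, per protobuf's rule that parsers must handle either
      // form for repeated scalars.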
      private int bitField0_;

      private int retryPolicy_ = 0;
      /**
       * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
       * @return Whether the retryPolicy field is set.
       */
      @java.lang.Override public boolean hasRetryPolicy() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
       * @return The retryPolicy.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto getRetryPolicy() {
        org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto result = org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.forNumber(retryPolicy_);
        return result == null ? org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto.NEVER_RETRY : result;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
       * @param value The retryPolicy to set.
       * @return This builder for chaining.
       */
      public Builder setRetryPolicy(org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryPolicyProto value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        retryPolicy_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ContainerRetryPolicyProto retry_policy = 1 [default = NEVER_RETRY];</code>
       * @return This builder for chaining.
       */
      public Builder clearRetryPolicy() {
        bitField0_ = (bitField0_ & ~0x00000001);
        retryPolicy_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.Internal.IntList errorCodes_ = emptyIntList();
      private void ensureErrorCodesIsMutable() {
        if (!errorCodes_.isModifiable()) {
          errorCodes_ = makeMutableCopy(errorCodes_);
        }
        bitField0_ |= 0x00000002;
      }
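      // errorCodes_ uses protobuf's primitive IntList to avoid boxing.
      // Mutation first makes a mutable copy when the current list is shared
      // or immutable (isModifiable() == false): a copy-on-write scheme that
      // lets mergeFrom() alias another message's immutable list until this
      // builder actually writes to it.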
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @return A list containing the errorCodes.
       */
      public java.util.List<java.lang.Integer>
          getErrorCodesList() {
        errorCodes_.makeImmutable();
        return errorCodes_;
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @return The count of errorCodes.
       */
      public int getErrorCodesCount() {
        return errorCodes_.size();
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @param index The index of the element to return.
       * @return The errorCodes at the given index.
       */
      public int getErrorCodes(int index) {
        return errorCodes_.getInt(index);
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @param index The index to set the value at.
       * @param value The errorCodes to set.
       * @return This builder for chaining.
       */
      public Builder setErrorCodes(
          int index, int value) {
        ensureErrorCodesIsMutable();
        errorCodes_.setInt(index, value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @param value The errorCodes to add.
       * @return This builder for chaining.
       */
      public Builder addErrorCodes(int value) {
        ensureErrorCodesIsMutable();
        errorCodes_.addInt(value);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @param values The errorCodes to add.
       * @return This builder for chaining.
       */
      public Builder addAllErrorCodes(
          java.lang.Iterable<? extends java.lang.Integer> values) {
        ensureErrorCodesIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, errorCodes_);
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 error_codes = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearErrorCodes() {
        errorCodes_ = emptyIntList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
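      // Usage sketch (illustrative; not part of the generated code): the repeated
      // int32 accessors above follow the standard protobuf builder pattern, e.g.
      //
      //   Builder b = ContainerRetryContextProto.newBuilder();
      //   b.addErrorCodes(10);
      //   b.addAllErrorCodes(java.util.Arrays.asList(20, 30));
      //   b.setErrorCodes(0, 11);          // overwrite the element at index 0
      //   int n = b.getErrorCodesCount();  // 3
      //   b.clearErrorCodes();             // back to an empty list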

      private int maxRetries_ ;
      /**
       * <code>optional int32 max_retries = 3 [default = 0];</code>
       * @return Whether the maxRetries field is set.
       */
      @java.lang.Override
      public boolean hasMaxRetries() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int32 max_retries = 3 [default = 0];</code>
       * @return The maxRetries.
       */
      @java.lang.Override
      public int getMaxRetries() {
        return maxRetries_;
      }
      /**
       * <code>optional int32 max_retries = 3 [default = 0];</code>
       * @param value The maxRetries to set.
       * @return This builder for chaining.
       */
      public Builder setMaxRetries(int value) {
        maxRetries_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 max_retries = 3 [default = 0];</code>
       * @return This builder for chaining.
       */
      public Builder clearMaxRetries() {
        bitField0_ = (bitField0_ & ~0x00000004);
        maxRetries_ = 0;
        onChanged();
        return this;
      }

      private int retryInterval_ ;
      /**
       * <code>optional int32 retry_interval = 4 [default = 0];</code>
       * @return Whether the retryInterval field is set.
       */
      @java.lang.Override
      public boolean hasRetryInterval() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int32 retry_interval = 4 [default = 0];</code>
       * @return The retryInterval.
       */
      @java.lang.Override
      public int getRetryInterval() {
        return retryInterval_;
      }
      /**
       * <code>optional int32 retry_interval = 4 [default = 0];</code>
       * @param value The retryInterval to set.
       * @return This builder for chaining.
       */
      public Builder setRetryInterval(int value) {
        retryInterval_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 retry_interval = 4 [default = 0];</code>
       * @return This builder for chaining.
       */
      public Builder clearRetryInterval() {
        bitField0_ = (bitField0_ & ~0x00000008);
        retryInterval_ = 0;
        onChanged();
        return this;
      }

      private long failuresValidityInterval_ = -1L;
      /**
       * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
       * @return Whether the failuresValidityInterval field is set.
       */
      @java.lang.Override
      public boolean hasFailuresValidityInterval() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
       * @return The failuresValidityInterval.
       */
      @java.lang.Override
      public long getFailuresValidityInterval() {
        return failuresValidityInterval_;
      }
      /**
       * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
       * @param value The failuresValidityInterval to set.
       * @return This builder for chaining.
       */
      public Builder setFailuresValidityInterval(long value) {
        failuresValidityInterval_ = value;
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 failures_validity_interval = 5 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearFailuresValidityInterval() {
        bitField0_ = (bitField0_ & ~0x00000010);
        failuresValidityInterval_ = -1L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerRetryContextProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerRetryContextProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerRetryContextProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerRetryContextProto>() {
      @java.lang.Override
      public ContainerRetryContextProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerRetryContextProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerRetryContextProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ContainerRetryContextProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
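
  // Illustrative sketch (editorial addition, not protoc-generated): exercises the
  // ContainerRetryContextProto builder defined above and round-trips the message
  // through its wire format. Field values are arbitrary examples; the comments on
  // field semantics reflect YARN's ContainerRetryContext API, not the generated
  // code itself.
  private static ContainerRetryContextProto exampleContainerRetryContextRoundTrip()
      throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
    ContainerRetryContextProto msg = ContainerRetryContextProto.newBuilder()
        .setRetryPolicy(ContainerRetryPolicyProto.NEVER_RETRY) // explicit, though NEVER_RETRY is also the declared default
        .addErrorCodes(1)
        .addErrorCodes(137)
        .setMaxRetries(3)
        .setRetryInterval(5000)            // interval between retries
        .setFailuresValidityInterval(-1L)  // -1 (the declared default) keeps all failures counted
        .build();
    byte[] wire = msg.toByteArray();
    // parseFrom(byte[]) throws InvalidProtocolBufferException on malformed input.
    return ContainerRetryContextProto.parseFrom(wire);
  }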

  public interface StringLocalResourceMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringLocalResourceMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     * @return The value.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue();
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder();
  }
  /**
   * <pre>
   * From common
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.StringLocalResourceMapProto}
   */
  public static final class StringLocalResourceMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StringLocalResourceMapProto)
      StringLocalResourceMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StringLocalResourceMapProto.newBuilder() to construct.
    private StringLocalResourceMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StringLocalResourceMapProto() {
      key_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StringLocalResourceMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    private org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value_;
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue() {
      return value_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_;
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder() {
      return value_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getValue());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getValue());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (!getValue()
            .equals(other.getValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * From common
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.StringLocalResourceMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringLocalResourceMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getValueFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = null;
        if (valueBuilder_ != null) {
          valueBuilder_.dispose();
          valueBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = valueBuilder_ == null
              ? value_
              : valueBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          mergeValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                input.readMessage(
                    getValueFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
      /**
       * <code>optional string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> valueBuilder_;
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       * @return Whether the value field is set.
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       * @return The value.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getValue() {
        if (valueBuilder_ == null) {
          return value_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_;
        } else {
          return valueBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public Builder setValue(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) {
        if (valueBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          value_ = value;
        } else {
          valueBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public Builder setValue(
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder builderForValue) {
        if (valueBuilder_ == null) {
          value_ = builderForValue.build();
        } else {
          valueBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public Builder mergeValue(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) {
        if (valueBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            value_ != null &&
            value_ != org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance()) {
            getValueBuilder().mergeFrom(value);
          } else {
            value_ = value;
          }
        } else {
          valueBuilder_.mergeFrom(value);
        }
        if (value_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = null;
        if (valueBuilder_ != null) {
          valueBuilder_.dispose();
          valueBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder getValueBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getValueFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getValueOrBuilder() {
        if (valueBuilder_ != null) {
          return valueBuilder_.getMessageOrBuilder();
        } else {
          return value_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : value_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto value = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> 
          getValueFieldBuilder() {
        if (valueBuilder_ == null) {
          valueBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder>(
                  getValue(),
                  getParentForChildren(),
                  isClean());
          value_ = null;
        }
        return valueBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringLocalResourceMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StringLocalResourceMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringLocalResourceMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringLocalResourceMapProto>() {
      @java.lang.Override
      public StringLocalResourceMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StringLocalResourceMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StringLocalResourceMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringLocalResourceMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
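
  // Illustrative sketch (editorial addition, not protoc-generated): a
  // StringLocalResourceMapProto is a key/value pair used to serialize
  // string -> LocalResourceProto map entries. The key below is a hypothetical
  // resource name, and LocalResourceProto.getDefaultInstance() stands in for a
  // populated resource.
  private static StringLocalResourceMapProto exampleStringLocalResourceEntry() {
    return StringLocalResourceMapProto.newBuilder()
        .setKey("appMaster.jar")
        .setValue(LocalResourceProto.getDefaultInstance())
        .build();
  }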

  public interface StringStringMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringStringMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>optional string value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>optional string value = 2;</code>
     * @return The value.
     */
    java.lang.String getValue();
    /**
     * <code>optional string value = 2;</code>
     * @return The bytes for value.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getValueBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.StringStringMapProto}
   */
  public static final class StringStringMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StringStringMapProto)
      StringStringMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StringStringMapProto.newBuilder() to construct.
    private StringStringMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StringStringMapProto() {
      key_ = "";
      value_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StringStringMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder.class);
    }

    private int bitField0_;
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object value_ = "";
    /**
     * <code>optional string value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public java.lang.String getValue() {
      java.lang.Object ref = value_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          value_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string value = 2;</code>
     * @return The bytes for value.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getValueBytes() {
      java.lang.Object ref = value_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        value_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (!getValue()
            .equals(other.getValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StringStringMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringStringMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringStringMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          value_ = other.value_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
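            // Editor's note: a protobuf tag packs (field_number << 3) | wire_type,
            // so tag 10 is field 1 with wire type 2 (length-delimited) and tag 18
            // is field 2 with the same wire type; tag 0 signals end of input.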
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                value_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
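      // Editor's note: key_ is typed Object because it holds either a
      // java.lang.String or a ByteString. getKey() decodes the bytes lazily and
      // caches the String when it is valid UTF-8; getKeyBytes() caches the
      // encoded form in the other direction, so repeated calls stay cheap.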
      /**
       * <code>optional string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object value_ = "";
      /**
       * <code>optional string value = 2;</code>
       * @return Whether the value field is set.
       */
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string value = 2;</code>
       * @return The value.
       */
      public java.lang.String getValue() {
        java.lang.Object ref = value_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            value_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string value = 2;</code>
       * @return The bytes for value.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getValueBytes() {
        java.lang.Object ref = value_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          value_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string value = 2;</code>
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        value_ = getDefaultInstance().getValue();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string value = 2;</code>
       * @param value The bytes for value to set.
       * @return This builder for chaining.
       */
      public Builder setValueBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringStringMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StringStringMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringStringMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringStringMapProto>() {
      @java.lang.Override
      public StringStringMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
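    // Editor's note: the public PARSER field above is retained for legacy
    // callers but deprecated; new code should obtain the parser via parser()
    // or getParserForType() below.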

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StringStringMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StringStringMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
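
  /*
   * Editor's sketch (illustrative only; the key and value literals are
   * hypothetical): building a StringStringMapProto entry and round-tripping it
   * through its wire format via the generated parseFrom(byte[]) overload.
   *
   *   YarnProtos.StringStringMapProto entry =
   *       YarnProtos.StringStringMapProto.newBuilder()
   *           .setKey("queue")
   *           .setValue("default")
   *           .build();
   *   byte[] wire = entry.toByteArray();
   *   YarnProtos.StringStringMapProto parsed =
   *       YarnProtos.StringStringMapProto.parseFrom(wire);
   *   assert parsed.hasKey() && parsed.getKey().equals("queue");
   */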

  public interface StringBytesMapProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.StringBytesMapProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    boolean hasKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    java.lang.String getKey();
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes();

    /**
     * <code>optional bytes value = 2;</code>
     * @return Whether the value field is set.
     */
    boolean hasValue();
    /**
     * <code>optional bytes value = 2;</code>
     * @return The value.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getValue();
  }
  /**
   * Protobuf type {@code hadoop.yarn.StringBytesMapProto}
   */
  public static final class StringBytesMapProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.StringBytesMapProto)
      StringBytesMapProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use StringBytesMapProto.newBuilder() to construct.
    private StringBytesMapProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private StringBytesMapProto() {
      key_ = "";
      value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new StringBytesMapProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder.class);
    }

    private int bitField0_;
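    // Editor's note: bitField0_ records explicit field presence (bit 0 = key,
    // bit 1 = value); proto2 optional fields track presence with has-bits
    // rather than by comparing against default values.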
    public static final int KEY_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object key_ = "";
    /**
     * <code>optional string key = 1;</code>
     * @return Whether the key field is set.
     */
    @java.lang.Override
    public boolean hasKey() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The key.
     */
    @java.lang.Override
    public java.lang.String getKey() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          key_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string key = 1;</code>
     * @return The bytes for key.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getKeyBytes() {
      java.lang.Object ref = key_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        key_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VALUE_FIELD_NUMBER = 2;
    private org.apache.hadoop.thirdparty.protobuf.ByteString value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes value = 2;</code>
     * @return Whether the value field is set.
     */
    @java.lang.Override
    public boolean hasValue() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional bytes value = 2;</code>
     * @return The value.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getValue() {
      return value_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeBytes(2, value_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, key_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(2, value_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto other = (org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto) obj;

      if (hasKey() != other.hasKey()) return false;
      if (hasKey()) {
        if (!getKey()
            .equals(other.getKey())) return false;
      }
      if (hasValue() != other.hasValue()) return false;
      if (hasValue()) {
        if (!getValue()
            .equals(other.getValue())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasKey()) {
        hash = (37 * hash) + KEY_FIELD_NUMBER;
        hash = (53 * hash) + getKey().hashCode();
      }
      if (hasValue()) {
        hash = (37 * hash) + VALUE_FIELD_NUMBER;
        hash = (53 * hash) + getValue().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.StringBytesMapProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.StringBytesMapProto)
        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.class, org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        key_ = "";
        value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_StringBytesMapProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result = new org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.key_ = key_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.value_ = value_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto.getDefaultInstance()) return this;
        if (other.hasKey()) {
          key_ = other.key_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasValue()) {
          setValue(other.getValue());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                key_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                value_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object key_ = "";
      /**
       * <code>optional string key = 1;</code>
       * @return Whether the key field is set.
       */
      public boolean hasKey() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The key.
       */
      public java.lang.String getKey() {
        java.lang.Object ref = key_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            key_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @return The bytes for key.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getKeyBytes() {
        java.lang.Object ref = key_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          key_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The key to set.
       * @return This builder for chaining.
       */
      public Builder setKey(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearKey() {
        key_ = getDefaultInstance().getKey();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string key = 1;</code>
       * @param value The bytes for key to set.
       * @return This builder for chaining.
       */
      public Builder setKeyBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        key_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.ByteString value_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes value = 2;</code>
       * @return Whether the value field is set.
       */
      @java.lang.Override
      public boolean hasValue() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional bytes value = 2;</code>
       * @return The value.
       */
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.ByteString getValue() {
        return value_;
      }
      /**
       * <code>optional bytes value = 2;</code>
       * @param value The value to set.
       * @return This builder for chaining.
       */
      public Builder setValue(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        value_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes value = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearValue() {
        bitField0_ = (bitField0_ & ~0x00000002);
        value_ = getDefaultInstance().getValue();
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.StringBytesMapProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.StringBytesMapProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<StringBytesMapProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<StringBytesMapProto>() {
      @java.lang.Override
      public StringBytesMapProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<StringBytesMapProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<StringBytesMapProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringBytesMapProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
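
  /*
   * Editor's sketch (illustrative only; the payload bytes are hypothetical):
   * StringBytesMapProto pairs a UTF-8 key with an opaque bytes value, so the
   * value is set from a ByteString rather than a java.lang.String.
   *
   *   YarnProtos.StringBytesMapProto blob =
   *       YarnProtos.StringBytesMapProto.newBuilder()
   *           .setKey("payload")
   *           .setValue(org.apache.hadoop.thirdparty.protobuf.ByteString
   *               .copyFrom(new byte[] {0x01, 0x02}))
   *           .build();
   *   // hasValue() distinguishes an explicitly set empty value from an unset
   *   // one; getValue() alone cannot, since both yield ByteString.EMPTY.
   *   assert blob.hasValue() && blob.getValue().size() == 2;
   */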

  public interface CollectorInfoProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.CollectorInfoProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string collector_addr = 1;</code>
     * @return Whether the collectorAddr field is set.
     */
    boolean hasCollectorAddr();
    /**
     * <code>optional string collector_addr = 1;</code>
     * @return The collectorAddr.
     */
    java.lang.String getCollectorAddr();
    /**
     * <code>optional string collector_addr = 1;</code>
     * @return The bytes for collectorAddr.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getCollectorAddrBytes();

    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     * @return Whether the collectorToken field is set.
     */
    boolean hasCollectorToken();
    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     * @return The collectorToken.
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken();
    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     */
    org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder();
  }
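
  /*
   * Editor's sketch (illustrative only; the address literal is hypothetical,
   * and TokenProto comes from hadoop-common's SecurityProtos):
   *
   *   YarnProtos.CollectorInfoProto info =
   *       YarnProtos.CollectorInfoProto.newBuilder()
   *           .setCollectorAddr("timeline-collector.example.com:8048")
   *           .build();
   *   // collector_token is optional; the getter falls back to the default
   *   // TokenProto instance when the field was never set.
   *   assert !info.hasCollectorToken();
   */
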
  /**
   * Protobuf type {@code hadoop.yarn.CollectorInfoProto}
   */
  public static final class CollectorInfoProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.CollectorInfoProto)
      CollectorInfoProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use CollectorInfoProto.newBuilder() to construct.
    private CollectorInfoProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private CollectorInfoProto() {
      collectorAddr_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new CollectorInfoProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder.class);
    }

    private int bitField0_;
    public static final int COLLECTOR_ADDR_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object collectorAddr_ = "";
    /**
     * <code>optional string collector_addr = 1;</code>
     * @return Whether the collectorAddr field is set.
     */
    @java.lang.Override
    public boolean hasCollectorAddr() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string collector_addr = 1;</code>
     * @return The collectorAddr.
     */
    @java.lang.Override
    public java.lang.String getCollectorAddr() {
      java.lang.Object ref = collectorAddr_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs =
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          collectorAddr_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string collector_addr = 1;</code>
     * @return The bytes for collectorAddr.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getCollectorAddrBytes() {
      java.lang.Object ref = collectorAddr_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b =
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        collectorAddr_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int COLLECTOR_TOKEN_FIELD_NUMBER = 2;
    private org.apache.hadoop.security.proto.SecurityProtos.TokenProto collectorToken_;
    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     * @return Whether the collectorToken field is set.
     */
    @java.lang.Override
    public boolean hasCollectorToken() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     * @return The collectorToken.
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken() {
      return collectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_;
    }
    /**
     * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder() {
      return collectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_;
    }

    private byte memoizedIsInitialized = -1;
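    // Editor's note: memoizedIsInitialized caches isInitialized() results
    // (-1 = not yet computed, 0 = false, 1 = true). Unlike the map entry
    // messages above, this message must recurse into the nested TokenProto,
    // whose required fields may be unset.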
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (hasCollectorToken()) {
        if (!getCollectorToken().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, collectorAddr_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeMessage(2, getCollectorToken());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, collectorAddr_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, getCollectorToken());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto other = (org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto) obj;

      if (hasCollectorAddr() != other.hasCollectorAddr()) return false;
      if (hasCollectorAddr()) {
        if (!getCollectorAddr()
            .equals(other.getCollectorAddr())) return false;
      }
      if (hasCollectorToken() != other.hasCollectorToken()) return false;
      if (hasCollectorToken()) {
        if (!getCollectorToken()
            .equals(other.getCollectorToken())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasCollectorAddr()) {
        hash = (37 * hash) + COLLECTOR_ADDR_FIELD_NUMBER;
        hash = (53 * hash) + getCollectorAddr().hashCode();
      }
      if (hasCollectorToken()) {
        hash = (37 * hash) + COLLECTOR_TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getCollectorToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.CollectorInfoProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.CollectorInfoProto)
        org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.class, org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getCollectorTokenFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        collectorAddr_ = "";
        collectorToken_ = null;
        if (collectorTokenBuilder_ != null) {
          collectorTokenBuilder_.dispose();
          collectorTokenBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.internal_static_hadoop_yarn_CollectorInfoProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto build() {
        org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result = new org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.collectorAddr_ = collectorAddr_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.collectorToken_ = collectorTokenBuilder_ == null
              ? collectorToken_
              : collectorTokenBuilder_.build();
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto.getDefaultInstance()) return this;
        if (other.hasCollectorAddr()) {
          collectorAddr_ = other.collectorAddr_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasCollectorToken()) {
          mergeCollectorToken(other.getCollectorToken());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (hasCollectorToken()) {
          if (!getCollectorToken().isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                collectorAddr_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
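                // Editor's note: field 2 is a nested message, so it is parsed
                // directly into the lazily created TokenProto field builder and
                // merged with any previously set token fields.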
                input.readMessage(
                    getCollectorTokenFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
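      // Wire-format note for the loop above: a protobuf tag is
      // (field_number << 3) | wire_type, so "case 10" is field 1
      // (collector_addr, wire type 2 = length-delimited) and "case 18"
      // is field 2 (collector_token). The bitField0_ declared below
      // tracks field presence: bit 0 = collector_addr, bit 1 =
      // collector_token.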
      private int bitField0_;

      private java.lang.Object collectorAddr_ = "";
      /**
       * <code>optional string collector_addr = 1;</code>
       * @return Whether the collectorAddr field is set.
       */
      public boolean hasCollectorAddr() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string collector_addr = 1;</code>
       * @return The collectorAddr.
       */
      public java.lang.String getCollectorAddr() {
        java.lang.Object ref = collectorAddr_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            collectorAddr_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string collector_addr = 1;</code>
       * @return The bytes for collectorAddr.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getCollectorAddrBytes() {
        java.lang.Object ref = collectorAddr_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b =
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          collectorAddr_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
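      // The accessors above convert lazily: collectorAddr_ holds either
      // a String or a ByteString, and each getter caches the converted
      // form (the String form is cached only when the bytes are valid
      // UTF-8).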
      /**
       * <code>optional string collector_addr = 1;</code>
       * @param value The collectorAddr to set.
       * @return This builder for chaining.
       */
      public Builder setCollectorAddr(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        collectorAddr_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string collector_addr = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearCollectorAddr() {
        collectorAddr_ = getDefaultInstance().getCollectorAddr();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string collector_addr = 1;</code>
       * @param value The bytes for collectorAddr to set.
       * @return This builder for chaining.
       */
      public Builder setCollectorAddrBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        collectorAddr_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private org.apache.hadoop.security.proto.SecurityProtos.TokenProto collectorToken_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> collectorTokenBuilder_;
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       * @return Whether the collectorToken field is set.
       */
      public boolean hasCollectorToken() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       * @return The collectorToken.
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto getCollectorToken() {
        if (collectorTokenBuilder_ == null) {
          return collectorToken_ == null ? org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_;
        } else {
          return collectorTokenBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public Builder setCollectorToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (collectorTokenBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          collectorToken_ = value;
        } else {
          collectorTokenBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public Builder setCollectorToken(
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder builderForValue) {
        if (collectorTokenBuilder_ == null) {
          collectorToken_ = builderForValue.build();
        } else {
          collectorTokenBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public Builder mergeCollectorToken(org.apache.hadoop.security.proto.SecurityProtos.TokenProto value) {
        if (collectorTokenBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0) &&
            collectorToken_ != null &&
            collectorToken_ != org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance()) {
            getCollectorTokenBuilder().mergeFrom(value);
          } else {
            collectorToken_ = value;
          }
        } else {
          collectorTokenBuilder_.mergeFrom(value);
        }
        if (collectorToken_ != null) {
          bitField0_ |= 0x00000002;
          onChanged();
        }
        return this;
      }
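      // mergeCollectorToken() above performs a field-by-field protobuf
      // merge when a non-default token is already present; otherwise it
      // simply adopts the given value.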
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public Builder clearCollectorToken() {
        bitField0_ = (bitField0_ & ~0x00000002);
        collectorToken_ = null;
        if (collectorTokenBuilder_ != null) {
          collectorTokenBuilder_.dispose();
          collectorTokenBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder getCollectorTokenBuilder() {
        bitField0_ |= 0x00000002;
        onChanged();
        return getCollectorTokenFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      public org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder getCollectorTokenOrBuilder() {
        if (collectorTokenBuilder_ != null) {
          return collectorTokenBuilder_.getMessageOrBuilder();
        } else {
          return collectorToken_ == null ?
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto.getDefaultInstance() : collectorToken_;
        }
      }
      /**
       * <code>optional .hadoop.common.TokenProto collector_token = 2;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder> 
          getCollectorTokenFieldBuilder() {
        if (collectorTokenBuilder_ == null) {
          collectorTokenBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.security.proto.SecurityProtos.TokenProto, org.apache.hadoop.security.proto.SecurityProtos.TokenProto.Builder, org.apache.hadoop.security.proto.SecurityProtos.TokenProtoOrBuilder>(
                  getCollectorToken(),
                  getParentForChildren(),
                  isClean());
          collectorToken_ = null;
        }
        return collectorTokenBuilder_;
      }
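      // Once the SingleFieldBuilderV3 above exists it becomes the single
      // source of truth for collector_token: the plain collectorToken_
      // field is nulled and all reads and writes go through the builder.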
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.CollectorInfoProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.CollectorInfoProto)
    private static final org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<CollectorInfoProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<CollectorInfoProto>() {
      @java.lang.Override
      public CollectorInfoProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
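    // PARSER stays public for source compatibility with older generated
    // APIs but is deprecated in favor of parser() below. Note that
    // parsePartialFrom attaches the partially built message to any
    // InvalidProtocolBufferException via setUnfinishedMessage, so
    // callers can inspect what was read before a failure.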

    public static org.apache.hadoop.thirdparty.protobuf.Parser<CollectorInfoProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<CollectorInfoProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.CollectorInfoProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
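  /*
   * Illustrative usage sketch (not part of the generated output).
   * Assuming the standard GeneratedMessageV3 surface (newBuilder,
   * toByteArray, parseFrom), round-tripping a CollectorInfoProto could
   * look like:
   *
   *   YarnProtos.CollectorInfoProto info =
   *       YarnProtos.CollectorInfoProto.newBuilder()
   *           .setCollectorAddr("collector-host:8188")  // made-up address
   *           .build();
   *   byte[] bytes = info.toByteArray();
   *   YarnProtos.CollectorInfoProto parsed =
   *       YarnProtos.CollectorInfoProto.parseFrom(bytes);
   *
   * Both fields are optional, so build() succeeds without a
   * collector_token; if one is set, it must itself be initialized.
   */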

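  // One Descriptor/FieldAccessorTable pair per message type declared in
  // yarn_protos.proto follows; all of them are populated from the
  // serialized file descriptor in the static initializer further down.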
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SerializedExceptionProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationIdProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerIdProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceInformationProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceOptionProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceProfileEntry_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceProfilesProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeResourceMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PriorityProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerReportProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_URLProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_URLProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LocalResourceProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StringLongMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StringFloatMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationReportProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeIdProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeReportProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeLabelProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeAttributeProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_AttributeToNodesProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeToAttributesProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FederationSubClusterProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SchedulingRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceSizingProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PreemptionMessageProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PreemptionContractProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PreemptionContainerProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LogAggregationContextProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_QueueStatisticsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_QueueInfoProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PlacementConstraintProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationIdProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationRequestsProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerStatusProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StringStringMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_StringBytesMapProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_CollectorInfoProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable;

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
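  // descriptorData below is the serialized FileDescriptorProto for
  // yarn_protos.proto, emitted as escaped-byte string literals and split
  // into chunks so each literal stays within the JVM's string-constant
  // size limit. The bytes are load-bearing: do not reflow or re-escape.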
  static {
    java.lang.String[] descriptorData = {
      "\n\021yarn_protos.proto\022\013hadoop.yarn\032\016Securi" +
      "ty.proto\"\204\001\n\030SerializedExceptionProto\022\017\n" +
      "\007message\030\001 \001(\t\022\r\n\005trace\030\002 \001(\t\022\022\n\nclass_n" +
      "ame\030\003 \001(\t\0224\n\005cause\030\004 \001(\0132%.hadoop.yarn.S" +
      "erializedExceptionProto\";\n\022ApplicationId" +
      "Proto\022\n\n\002id\030\001 \001(\005\022\031\n\021cluster_timestamp\030\002" +
      " \001(\003\"g\n\031ApplicationAttemptIdProto\0227\n\016app" +
      "lication_id\030\001 \001(\0132\037.hadoop.yarn.Applicat" +
      "ionIdProto\022\021\n\tattemptId\030\002 \001(\005\"\217\001\n\020Contai" +
      "nerIdProto\022/\n\006app_id\030\001 \001(\0132\037.hadoop.yarn" +
      ".ApplicationIdProto\022>\n\016app_attempt_id\030\002 " +
      "\001(\0132&.hadoop.yarn.ApplicationAttemptIdPr" +
      "oto\022\n\n\002id\030\003 \001(\003\"\271\001\n\030ResourceInformationP" +
      "roto\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \001(\003\022\r\n\005unit" +
      "s\030\003 \001(\t\022-\n\004type\030\004 \001(\0162\037.hadoop.yarn.Reso" +
      "urceTypesProto\022\014\n\004tags\030\005 \003(\t\0225\n\nattribut" +
      "es\030\006 \003(\0132!.hadoop.yarn.StringStringMapPr" +
      "oto\"c\n\025ResourceTypeInfoProto\022\014\n\004name\030\001 \002" +
      "(\t\022\r\n\005units\030\002 \001(\t\022-\n\004type\030\003 \001(\0162\037.hadoop" +
      ".yarn.ResourceTypesProto\"y\n\rResourceProt" +
      "o\022\016\n\006memory\030\001 \001(\003\022\025\n\rvirtual_cores\030\002 \001(\005" +
      "\022A\n\022resource_value_map\030\003 \003(\0132%.hadoop.ya" +
      "rn.ResourceInformationProto\"~\n\030ResourceU" +
      "tilizationProto\022\014\n\004pmem\030\001 \001(\005\022\014\n\004vmem\030\002 " +
      "\001(\005\022\013\n\003cpu\030\003 \001(\002\0229\n\017customResources\030\004 \003(" +
      "\0132 .hadoop.yarn.StringFloatMapProto\"`\n\023R" +
      "esourceOptionProto\022,\n\010resource\030\001 \001(\0132\032.h" +
      "adoop.yarn.ResourceProto\022\033\n\023over_commit_" +
      "timeout\030\002 \001(\005\"S\n\024ResourceProfileEntry\022\014\n" +
      "\004name\030\001 \002(\t\022-\n\tresources\030\002 \002(\0132\032.hadoop." +
      "yarn.ResourceProto\"Y\n\025ResourceProfilesPr" +
      "oto\022@\n\025resource_profiles_map\030\001 \003(\0132!.had" +
      "oop.yarn.ResourceProfileEntry\"|\n\024NodeRes" +
      "ourceMapProto\022)\n\007node_id\030\001 \001(\0132\030.hadoop." +
      "yarn.NodeIdProto\0229\n\017resource_option\030\002 \001(" +
      "\0132 .hadoop.yarn.ResourceOptionProto\"!\n\rP" +
      "riorityProto\022\020\n\010priority\030\001 \001(\005\"\274\003\n\016Conta" +
      "inerProto\022)\n\002id\030\001 \001(\0132\035.hadoop.yarn.Cont" +
      "ainerIdProto\022(\n\006nodeId\030\002 \001(\0132\030.hadoop.ya" +
      "rn.NodeIdProto\022\031\n\021node_http_address\030\003 \001(" +
      "\t\022,\n\010resource\030\004 \001(\0132\032.hadoop.yarn.Resour" +
      "ceProto\022,\n\010priority\030\005 \001(\0132\032.hadoop.yarn." +
      "PriorityProto\0222\n\017container_token\030\006 \001(\0132\031" +
      ".hadoop.common.TokenProto\022C\n\016execution_t" +
      "ype\030\007 \001(\0162\037.hadoop.yarn.ExecutionTypePro" +
      "to:\nGUARANTEED\022!\n\025allocation_request_id\030" +
      "\010 \001(\003:\002-1\022\022\n\007version\030\t \001(\005:\0010\022\027\n\017allocat" +
      "ion_tags\030\n \003(\t\022\025\n\rexposed_ports\030\013 \001(\t\"\376\003" +
      "\n\024ContainerReportProto\0223\n\014container_id\030\001" +
      " \001(\0132\035.hadoop.yarn.ContainerIdProto\022,\n\010r" +
      "esource\030\002 \001(\0132\032.hadoop.yarn.ResourceProt" +
      "o\022)\n\007node_id\030\003 \001(\0132\030.hadoop.yarn.NodeIdP" +
      "roto\022,\n\010priority\030\004 \001(\0132\032.hadoop.yarn.Pri" +
      "orityProto\022\025\n\rcreation_time\030\005 \001(\003\022\023\n\013fin" +
      "ish_time\030\006 \001(\003\022\035\n\020diagnostics_info\030\007 \001(\t" +
      ":\003N/A\022\017\n\007log_url\030\010 \001(\t\022\035\n\025container_exit" +
      "_status\030\t \001(\005\0229\n\017container_state\030\n \001(\0162 " +
      ".hadoop.yarn.ContainerStateProto\022\031\n\021node" +
      "_http_address\030\013 \001(\t\022B\n\rexecutionType\030\014 \001" +
      "(\0162\037.hadoop.yarn.ExecutionTypeProto:\nGUA" +
      "RANTEED\022\025\n\rexposed_ports\030\r \001(\t\"V\n\010URLPro" +
      "to\022\016\n\006scheme\030\001 \001(\t\022\014\n\004host\030\002 \001(\t\022\014\n\004port" +
      "\030\003 \001(\005\022\014\n\004file\030\004 \001(\t\022\020\n\010userInfo\030\005 \001(\t\"\215" +
      "\002\n\022LocalResourceProto\022\'\n\010resource\030\001 \001(\0132" +
      "\025.hadoop.yarn.URLProto\022\014\n\004size\030\002 \001(\003\022\021\n\t" +
      "timestamp\030\003 \001(\003\0221\n\004type\030\004 \001(\0162#.hadoop.y" +
      "arn.LocalResourceTypeProto\022=\n\nvisibility" +
      "\030\005 \001(\0162).hadoop.yarn.LocalResourceVisibi" +
      "lityProto\022\017\n\007pattern\030\006 \001(\t\022*\n\"should_be_" +
      "uploaded_to_shared_cache\030\007 \001(\010\"0\n\022String" +
      "LongMapProto\022\013\n\003key\030\001 \002(\t\022\r\n\005value\030\002 \002(\003" +
      "\"1\n\023StringFloatMapProto\022\013\n\003key\030\001 \002(\t\022\r\n\005" +
      "value\030\002 \002(\002\"\325\004\n#ApplicationResourceUsage" +
      "ReportProto\022\033\n\023num_used_containers\030\001 \001(\005" +
      "\022\037\n\027num_reserved_containers\030\002 \001(\005\0222\n\016use" +
      "d_resources\030\003 \001(\0132\032.hadoop.yarn.Resource" +
      "Proto\0226\n\022reserved_resources\030\004 \001(\0132\032.hado" +
      "op.yarn.ResourceProto\0224\n\020needed_resource" +
      "s\030\005 \001(\0132\032.hadoop.yarn.ResourceProto\022\026\n\016m" +
      "emory_seconds\030\006 \001(\003\022\025\n\rvcore_seconds\030\007 \001" +
      "(\003\022\036\n\026queue_usage_percentage\030\010 \001(\002\022 \n\030cl" +
      "uster_usage_percentage\030\t \001(\002\022 \n\030preempte" +
      "d_memory_seconds\030\n \001(\003\022\037\n\027preempted_vcor" +
      "e_seconds\030\013 \001(\003\022G\n\036application_resource_" +
      "usage_map\030\014 \003(\0132\037.hadoop.yarn.StringLong" +
      "MapProto\022Q\n(application_preempted_resour" +
      "ce_usage_map\030\r \003(\0132\037.hadoop.yarn.StringL" +
      "ongMapProto\"\267\010\n\026ApplicationReportProto\0226" +
      "\n\rapplicationId\030\001 \001(\0132\037.hadoop.yarn.Appl" +
      "icationIdProto\022\014\n\004user\030\002 \001(\t\022\r\n\005queue\030\003 " +
      "\001(\t\022\014\n\004name\030\004 \001(\t\022\014\n\004host\030\005 \001(\t\022\020\n\010rpc_p" +
      "ort\030\006 \001(\005\0225\n\022client_to_am_token\030\007 \001(\0132\031." +
      "hadoop.common.TokenProto\022F\n\026yarn_applica" +
      "tion_state\030\010 \001(\0162&.hadoop.yarn.YarnAppli" +
      "cationStateProto\022\023\n\013trackingUrl\030\t \001(\t\022\030\n" +
      "\013diagnostics\030\n \001(\t:\003N/A\022\021\n\tstartTime\030\013 \001" +
      "(\003\022\022\n\nfinishTime\030\014 \001(\003\022J\n\030final_applicat" +
      "ion_status\030\r \001(\0162(.hadoop.yarn.FinalAppl" +
      "icationStatusProto\022L\n\022app_resource_Usage" +
      "\030\016 \001(\01320.hadoop.yarn.ApplicationResource" +
      "UsageReportProto\022\033\n\023originalTrackingUrl\030" +
      "\017 \001(\t\022K\n\033currentApplicationAttemptId\030\020 \001" +
      "(\0132&.hadoop.yarn.ApplicationAttemptIdPro" +
      "to\022\020\n\010progress\030\021 \001(\002\022\027\n\017applicationType\030" +
      "\022 \001(\t\022.\n\013am_rm_token\030\023 \001(\0132\031.hadoop.comm" +
      "on.TokenProto\022\027\n\017applicationTags\030\024 \003(\t\022F" +
      "\n\026log_aggregation_status\030\025 \001(\0162&.hadoop." +
      "yarn.LogAggregationStatusProto\022$\n\025unmana" +
      "ged_application\030\026 \001(\010:\005false\022,\n\010priority" +
      "\030\027 \001(\0132\032.hadoop.yarn.PriorityProto\022\036\n\026ap" +
      "pNodeLabelExpression\030\030 \001(\t\022\035\n\025amNodeLabe" +
      "lExpression\030\031 \001(\t\0225\n\013appTimeouts\030\032 \003(\0132 " +
      ".hadoop.yarn.AppTimeoutsMapProto\022\022\n\nlaun" +
      "chTime\030\033 \001(\003\022\022\n\nsubmitTime\030\034 \001(\003\022\023\n\013rmCl" +
      "usterId\030\035 \001(\t\"\244\001\n\023AppTimeoutsMapProto\022J\n" +
      "\030application_timeout_type\030\001 \001(\0162(.hadoop" +
      ".yarn.ApplicationTimeoutTypeProto\022A\n\023app" +
      "lication_timeout\030\002 \001(\0132$.hadoop.yarn.App" +
      "licationTimeoutProto\"\222\001\n\027ApplicationTime" +
      "outProto\022J\n\030application_timeout_type\030\001 \002" +
      "(\0162(.hadoop.yarn.ApplicationTimeoutTypeP" +
      "roto\022\023\n\013expire_time\030\002 \001(\t\022\026\n\016remaining_t" +
      "ime\030\003 \001(\003\"\214\003\n\035ApplicationAttemptReportPr" +
      "oto\022F\n\026application_attempt_id\030\001 \001(\0132&.ha" +
      "doop.yarn.ApplicationAttemptIdProto\022\014\n\004h" +
      "ost\030\002 \001(\t\022\020\n\010rpc_port\030\003 \001(\005\022\024\n\014tracking_" +
      "url\030\004 \001(\t\022\030\n\013diagnostics\030\005 \001(\t:\003N/A\022U\n\036y" +
      "arn_application_attempt_state\030\006 \001(\0162-.ha" +
      "doop.yarn.YarnApplicationAttemptStatePro" +
      "to\0226\n\017am_container_id\030\007 \001(\0132\035.hadoop.yar" +
      "n.ContainerIdProto\022\035\n\025original_tracking_" +
      "url\030\010 \001(\t\022\021\n\tstartTime\030\t \001(\003\022\022\n\nfinishTi" +
      "me\030\n \001(\003\")\n\013NodeIdProto\022\014\n\004host\030\001 \001(\t\022\014\n" +
      "\004port\030\002 \001(\005\"\360\004\n\017NodeReportProto\022(\n\006nodeI" +
      "d\030\001 \001(\0132\030.hadoop.yarn.NodeIdProto\022\023\n\013htt" +
      "pAddress\030\002 \001(\t\022\020\n\010rackName\030\003 \001(\t\022(\n\004used" +
      "\030\004 \001(\0132\032.hadoop.yarn.ResourceProto\022.\n\nca" +
      "pability\030\005 \001(\0132\032.hadoop.yarn.ResourcePro" +
      "to\022\025\n\rnumContainers\030\006 \001(\005\022/\n\nnode_state\030" +
      "\007 \001(\0162\033.hadoop.yarn.NodeStateProto\022\025\n\rhe" +
      "alth_report\030\010 \001(\t\022\037\n\027last_health_report_" +
      "time\030\t \001(\003\022\023\n\013node_labels\030\n \003(\t\022E\n\026conta" +
      "iners_utilization\030\013 \001(\0132%.hadoop.yarn.Re" +
      "sourceUtilizationProto\022?\n\020node_utilizati" +
      "on\030\014 \001(\0132%.hadoop.yarn.ResourceUtilizati" +
      "onProto\022\037\n\027decommissioning_timeout\030\r \001(\r" +
      "\022:\n\020node_update_type\030\016 \001(\0162 .hadoop.yarn" +
      ".NodeUpdateTypeProto\0228\n\017node_attributes\030" +
      "\017 \003(\0132\037.hadoop.yarn.NodeAttributeProto\"S" +
      "\n\023NodeIdToLabelsProto\022(\n\006nodeId\030\001 \001(\0132\030." +
      "hadoop.yarn.NodeIdProto\022\022\n\nnodeLabels\030\002 " +
      "\003(\t\"T\n\024LabelsToNodeIdsProto\022\022\n\nnodeLabel" +
      "s\030\001 \001(\t\022(\n\006nodeId\030\002 \003(\0132\030.hadoop.yarn.No" +
      "deIdProto\"9\n\016NodeLabelProto\022\014\n\004name\030\001 \001(" +
      "\t\022\031\n\013isExclusive\030\002 \001(\010:\004true\"S\n\025NodeAttr" +
      "ibuteKeyProto\022#\n\017attributePrefix\030\001 \001(\t:\n" +
      "rm.yarn.io\022\025\n\rattributeName\030\002 \002(\t\"\254\001\n\022No" +
      "deAttributeProto\0228\n\014attributeKey\030\001 \002(\0132\"" +
      ".hadoop.yarn.NodeAttributeKeyProto\022B\n\rat" +
      "tributeType\030\002 \001(\0162#.hadoop.yarn.NodeAttr" +
      "ibuteTypeProto:\006STRING\022\030\n\016attributeValue" +
      "\030\003 \001(\t:\000\"\216\001\n\026NodeAttributeInfoProto\0228\n\014a" +
      "ttributeKey\030\001 \002(\0132\".hadoop.yarn.NodeAttr" +
      "ibuteKeyProto\022:\n\rattributeType\030\002 \002(\0162#.h" +
      "adoop.yarn.NodeAttributeTypeProto\"E\n\031Nod" +
      "eToAttributeValueProto\022\020\n\010hostname\030\001 \002(\t" +
      "\022\026\n\016attributeValue\030\002 \002(\t\"\220\001\n\025AttributeTo" +
      "NodesProto\0229\n\rnodeAttribute\030\001 \002(\0132\".hado" +
      "op.yarn.NodeAttributeKeyProto\022<\n\014nodeVal" +
      "ueMap\030\002 \003(\0132&.hadoop.yarn.NodeToAttribut" +
      "eValueProto\"^\n\025NodeToAttributesProto\022\014\n\004" +
      "node\030\001 \001(\t\0227\n\016nodeAttributes\030\002 \003(\0132\037.had" +
      "oop.yarn.NodeAttributeProto\"\224\001\n\032Deregist" +
      "erSubClustersProto\022\024\n\014subClusterId\030\001 \001(\t" +
      "\022\027\n\017deregisterState\030\002 \001(\t\022\031\n\021lastHeartBe" +
      "atTime\030\003 \001(\t\022\023\n\013information\030\004 \001(\t\022\027\n\017sub" +
      "ClusterState\030\005 \001(\t\"\214\001\n\032FederationQueueWe" +
      "ightProto\022\024\n\014routerWeight\030\001 \001(\t\022\022\n\namrmW" +
      "eight\030\002 \001(\t\022\025\n\rheadRoomAlpha\030\003 \001(\t\022\r\n\005qu" +
      "eue\030\004 \001(\t\022\036\n\026policyManagerClassName\030\005 \001(" +
      "\t\"e\n\031FederationSubClusterProto\022\024\n\014subClu" +
      "sterId\030\001 \001(\t\022\031\n\021lastHeartBeatTime\030\002 \001(\t\022" +
      "\027\n\017subClusterState\030\003 \001(\t\"\313\002\n\024ResourceReq" +
      "uestProto\022,\n\010priority\030\001 \001(\0132\032.hadoop.yar" +
      "n.PriorityProto\022\025\n\rresource_name\030\002 \001(\t\022." +
      "\n\ncapability\030\003 \001(\0132\032.hadoop.yarn.Resourc" +
      "eProto\022\026\n\016num_containers\030\004 \001(\005\022\034\n\016relax_" +
      "locality\030\005 \001(\010:\004true\022\035\n\025node_label_expre" +
      "ssion\030\006 \001(\t\022F\n\026execution_type_request\030\007 " +
      "\001(\0132&.hadoop.yarn.ExecutionTypeRequestPr" +
      "oto\022!\n\025allocation_request_id\030\010 \001(\003:\002-1\"\207" +
      "\001\n\031ExecutionTypeRequestProto\022C\n\016executio" +
      "n_type\030\001 \001(\0162\037.hadoop.yarn.ExecutionType" +
      "Proto:\nGUARANTEED\022%\n\026enforce_execution_t" +
      "ype\030\002 \001(\010:\005false\"\273\002\n\026SchedulingRequestPr" +
      "oto\022\036\n\023allocationRequestId\030\001 \001(\003:\0010\022,\n\010p" +
      "riority\030\002 \001(\0132\032.hadoop.yarn.PriorityProt" +
      "o\022=\n\rexecutionType\030\003 \001(\0132&.hadoop.yarn.E" +
      "xecutionTypeRequestProto\022\026\n\016allocationTa" +
      "gs\030\004 \003(\t\0228\n\016resourceSizing\030\005 \001(\0132 .hadoo" +
      "p.yarn.ResourceSizingProto\022B\n\023placementC" +
      "onstraint\030\006 \001(\0132%.hadoop.yarn.PlacementC" +
      "onstraintProto\"\\\n\023ResourceSizingProto\022\026\n" +
      "\016numAllocations\030\001 \001(\005\022-\n\tresources\030\002 \001(\013" +
      "2\032.hadoop.yarn.ResourceProto\"\211\001\n\036Rejecte" +
      "dSchedulingRequestProto\0221\n\006reason\030\001 \002(\0162" +
      "!.hadoop.yarn.RejectionReasonProto\0224\n\007re" +
      "quest\030\002 \002(\0132#.hadoop.yarn.SchedulingRequ" +
      "estProto\"\224\001\n\026PreemptionMessageProto\022B\n\016s" +
      "trictContract\030\001 \001(\0132*.hadoop.yarn.Strict" +
      "PreemptionContractProto\0226\n\010contract\030\002 \001(" +
      "\0132$.hadoop.yarn.PreemptionContractProto\"" +
      "Y\n\035StrictPreemptionContractProto\0228\n\tcont" +
      "ainer\030\001 \003(\0132%.hadoop.yarn.PreemptionCont" +
      "ainerProto\"\222\001\n\027PreemptionContractProto\022=" +
      "\n\010resource\030\001 \003(\0132+.hadoop.yarn.Preemptio" +
      "nResourceRequestProto\0228\n\tcontainer\030\002 \003(\013" +
      "2%.hadoop.yarn.PreemptionContainerProto\"" +
      "E\n\030PreemptionContainerProto\022)\n\002id\030\001 \001(\0132" +
      "\035.hadoop.yarn.ContainerIdProto\"U\n\036Preemp" +
      "tionResourceRequestProto\0223\n\010resource\030\001 \001" +
      "(\0132!.hadoop.yarn.ResourceRequestProto\"X\n" +
      "\035ResourceBlacklistRequestProto\022\033\n\023blackl" +
      "ist_additions\030\001 \003(\t\022\032\n\022blacklist_removal" +
      "s\030\002 \003(\t\"\274\007\n!ApplicationSubmissionContext" +
      "Proto\0227\n\016application_id\030\001 \001(\0132\037.hadoop.y" +
      "arn.ApplicationIdProto\022\035\n\020application_na" +
      "me\030\002 \001(\t:\003N/A\022\026\n\005queue\030\003 \001(\t:\007default\022,\n" +
      "\010priority\030\004 \001(\0132\032.hadoop.yarn.PriorityPr" +
      "oto\022C\n\021am_container_spec\030\005 \001(\0132(.hadoop." +
      "yarn.ContainerLaunchContextProto\022)\n\033canc" +
      "el_tokens_when_complete\030\006 \001(\010:\004true\022\033\n\014u" +
      "nmanaged_am\030\007 \001(\010:\005false\022\031\n\016maxAppAttemp" +
      "ts\030\010 \001(\005:\0010\022,\n\010resource\030\t \001(\0132\032.hadoop.y" +
      "arn.ResourceProto\022\035\n\017applicationType\030\n \001" +
      "(\t:\004YARN\022:\n+keep_containers_across_appli" +
      "cation_attempts\030\013 \001(\010:\005false\022\027\n\017applicat" +
      "ionTags\030\014 \003(\t\022.\n\"attempt_failures_validi" +
      "ty_interval\030\r \001(\003:\002-1\022H\n\027log_aggregation" +
      "_context\030\016 \001(\0132\'.hadoop.yarn.LogAggregat" +
      "ionContextProto\0227\n\016reservation_id\030\017 \001(\0132" +
      "\037.hadoop.yarn.ReservationIdProto\022\035\n\025node" +
      "_label_expression\030\020 \001(\t\022H\n\035am_container_" +
      "resource_request\030\021 \003(\0132!.hadoop.yarn.Res" +
      "ourceRequestProto\022E\n\024application_timeout" +
      "s\030\022 \003(\0132\'.hadoop.yarn.ApplicationTimeout" +
      "MapProto\022L\n!application_scheduling_prope" +
      "rties\030\023 \003(\0132!.hadoop.yarn.StringStringMa" +
      "pProto\"y\n\032ApplicationTimeoutMapProto\022J\n\030" +
      "application_timeout_type\030\001 \001(\0162(.hadoop." +
      "yarn.ApplicationTimeoutTypeProto\022\017\n\007time" +
      "out\030\002 \001(\003\"\203\001\n ApplicationUpdateTimeoutMa" +
      "pProto\022J\n\030application_timeout_type\030\001 \001(\016" +
      "2(.hadoop.yarn.ApplicationTimeoutTypePro" +
      "to\022\023\n\013expire_time\030\002 \001(\t\"\372\001\n\032LogAggregati" +
      "onContextProto\022\033\n\017include_pattern\030\001 \001(\t:" +
      "\002.*\022\031\n\017exclude_pattern\030\002 \001(\t:\000\022%\n\033rolled" +
      "_logs_include_pattern\030\003 \001(\t:\000\022\'\n\033rolled_" +
      "logs_exclude_pattern\030\004 \001(\t:\002.*\022)\n!log_ag" +
      "gregation_policy_class_name\030\005 \001(\t\022)\n!log" +
      "_aggregation_policy_parameters\030\006 \001(\t\"e\n\026" +
      "ApplicationACLMapProto\022;\n\naccessType\030\001 \001" +
      "(\0162\'.hadoop.yarn.ApplicationAccessTypePr" +
      "oto\022\016\n\003acl\030\002 \001(\t:\001 \"\362\001\n\027YarnClusterMetri" +
      "csProto\022\031\n\021num_node_managers\030\001 \001(\005\022\036\n\026nu" +
      "m_decommissioned_nms\030\002 \001(\005\022\026\n\016num_active" +
      "_nms\030\003 \001(\005\022\024\n\014num_lost_nms\030\004 \001(\005\022\031\n\021num_" +
      "unhealthy_nms\030\005 \001(\005\022\030\n\020num_rebooted_nms\030" +
      "\006 \001(\005\022\037\n\027num_decommissioning_nms\030\007 \001(\005\022\030" +
      "\n\020num_shutdown_nms\030\010 \001(\005\"\336\003\n\024QueueStatis" +
      "ticsProto\022\030\n\020numAppsSubmitted\030\001 \001(\003\022\026\n\016n" +
      "umAppsRunning\030\002 \001(\003\022\026\n\016numAppsPending\030\003 " +
      "\001(\003\022\030\n\020numAppsCompleted\030\004 \001(\003\022\025\n\rnumApps" +
      "Killed\030\005 \001(\003\022\025\n\rnumAppsFailed\030\006 \001(\003\022\026\n\016n" +
      "umActiveUsers\030\007 \001(\003\022\031\n\021availableMemoryMB" +
      "\030\010 \001(\003\022\031\n\021allocatedMemoryMB\030\t \001(\003\022\027\n\017pen" +
      "dingMemoryMB\030\n \001(\003\022\030\n\020reservedMemoryMB\030\013" +
      " \001(\003\022\027\n\017availableVCores\030\014 \001(\003\022\027\n\017allocat" +
      "edVCores\030\r \001(\003\022\025\n\rpendingVCores\030\016 \001(\003\022\026\n" +
      "\016reservedVCores\030\017 \001(\003\022\033\n\023allocatedContai" +
      "ners\030\020 \001(\003\022\031\n\021pendingContainers\030\021 \001(\003\022\032\n" +
      "\022reservedContainers\030\022 \001(\003\"\361\006\n\016QueueInfoP" +
      "roto\022\021\n\tqueueName\030\001 \001(\t\022\020\n\010capacity\030\002 \001(" +
      "\002\022\027\n\017maximumCapacity\030\003 \001(\002\022\027\n\017currentCap" +
      "acity\030\004 \001(\002\022+\n\005state\030\005 \001(\0162\034.hadoop.yarn" +
      ".QueueStateProto\0220\n\013childQueues\030\006 \003(\0132\033." +
      "hadoop.yarn.QueueInfoProto\0229\n\014applicatio" +
      "ns\030\007 \003(\0132#.hadoop.yarn.ApplicationReport" +
      "Proto\022\034\n\024accessibleNodeLabels\030\010 \003(\t\022\"\n\032d" +
      "efaultNodeLabelExpression\030\t \001(\t\022:\n\017queue" +
      "Statistics\030\n \001(\0132!.hadoop.yarn.QueueStat" +
      "isticsProto\022\032\n\022preemptionDisabled\030\013 \001(\010\022" +
      "H\n\026queueConfigurationsMap\030\014 \003(\0132(.hadoop" +
      ".yarn.QueueConfigurationsMapProto\022$\n\034int" +
      "raQueuePreemptionDisabled\030\r \001(\010\022\016\n\006weigh" +
      "t\030\016 \001(\002\022\021\n\tqueuePath\030\017 \001(\t\022\027\n\017maxParalle" +
      "lApps\030\020 \001(\005\022\025\n\rschedulerType\030\021 \001(\t\022\030\n\020mi" +
      "nResourceVCore\030\022 \001(\005\022\031\n\021minResourceMemor" +
      "y\030\023 \001(\003\022\030\n\020maxResourceVCore\030\024 \001(\005\022\031\n\021max" +
      "ResourceMemory\030\025 \001(\003\022\035\n\025reservedResource" +
      "VCore\030\026 \001(\005\022\036\n\026reservedResourceMemory\030\027 " +
      "\001(\003\022\034\n\024steadyFairShareVCore\030\030 \001(\005\022\035\n\025ste" +
      "adyFairShareMemory\030\031 \001(\003\022\024\n\014subClusterId" +
      "\030\032 \001(\t\022\025\n\rmaxRunningApp\030\033 \001(\005\"\373\002\n\030QueueC" +
      "onfigurationsProto\022\020\n\010capacity\030\001 \001(\002\022\030\n\020" +
      "absoluteCapacity\030\002 \001(\002\022\023\n\013maxCapacity\030\003 " +
      "\001(\002\022\033\n\023absoluteMaxCapacity\030\004 \001(\002\022\027\n\017maxA" +
      "MPercentage\030\005 \001(\002\0228\n\024effectiveMinCapacit" +
      "y\030\006 \001(\0132\032.hadoop.yarn.ResourceProto\0228\n\024e" +
      "ffectiveMaxCapacity\030\007 \001(\0132\032.hadoop.yarn." +
      "ResourceProto\0229\n\025configuredMinCapacity\030\010" +
      " \001(\0132\032.hadoop.yarn.ResourceProto\0229\n\025conf" +
      "iguredMaxCapacity\030\t \001(\0132\032.hadoop.yarn.Re" +
      "sourceProto\"x\n\033QueueConfigurationsMapPro" +
      "to\022\025\n\rpartitionName\030\001 \002(\t\022B\n\023queueConfig" +
      "urations\030\002 \001(\0132%.hadoop.yarn.QueueConfig" +
      "urationsProto\"X\n\025QueueUserACLInfoProto\022\021" +
      "\n\tqueueName\030\001 \001(\t\022,\n\010userAcls\030\002 \003(\0162\032.ha" +
      "doop.yarn.QueueACLProto\"\256\001\n\030PlacementCon" +
      "straintProto\022E\n\020simpleConstraint\030\001 \001(\0132+" +
      ".hadoop.yarn.SimplePlacementConstraintPr" +
      "oto\022K\n\023compositeConstraint\030\002 \001(\0132..hadoo" +
      "p.yarn.CompositePlacementConstraintProto" +
      "\"\347\001\n\036SimplePlacementConstraintProto\022\r\n\005s" +
      "cope\030\001 \002(\t\022F\n\021targetExpressions\030\002 \003(\0132+." +
      "hadoop.yarn.PlacementConstraintTargetPro" +
      "to\022\026\n\016minCardinality\030\003 \001(\005\022\026\n\016maxCardina" +
      "lity\030\004 \001(\005\022>\n\017attributeOpCode\030\005 \001(\0162%.ha" +
      "doop.yarn.NodeAttributeOpCodeProto\"\325\001\n\036P" +
      "lacementConstraintTargetProto\022J\n\ntargetT" +
      "ype\030\001 \002(\01626.hadoop.yarn.PlacementConstra" +
      "intTargetProto.TargetType\022\021\n\ttargetKey\030\002" +
      " \001(\t\022\024\n\014targetValues\030\003 \003(\t\">\n\nTargetType" +
      "\022\022\n\016NODE_ATTRIBUTE\020\001\022\022\n\016ALLOCATION_TAG\020\002" +
      "\022\010\n\004SELF\020\003\"\205\002\n\035TimedPlacementConstraintP" +
      "roto\022B\n\023placementConstraint\030\001 \002(\0132%.hado" +
      "op.yarn.PlacementConstraintProto\022\027\n\017sche" +
      "dulingDelay\030\002 \002(\003\022U\n\tdelayUnit\030\003 \001(\01624.h" +
      "adoop.yarn.TimedPlacementConstraintProto" +
      ".DelayUnit:\014MILLISECONDS\"0\n\tDelayUnit\022\020\n" +
      "\014MILLISECONDS\020\001\022\021\n\rOPPORTUNITIES\020\002\"\266\002\n!C" +
      "ompositePlacementConstraintProto\022S\n\rcomp" +
      "ositeType\030\001 \002(\0162<.hadoop.yarn.CompositeP" +
      "lacementConstraintProto.CompositeType\022?\n" +
      "\020childConstraints\030\002 \003(\0132%.hadoop.yarn.Pl" +
      "acementConstraintProto\022I\n\025timedChildCons" +
      "traints\030\003 \003(\0132*.hadoop.yarn.TimedPlaceme" +
      "ntConstraintProto\"0\n\rCompositeType\022\007\n\003AN" +
      "D\020\001\022\006\n\002OR\020\002\022\016\n\nDELAYED_OR\020\003\"\200\001\n Placemen" +
      "tConstraintMapEntryProto\022\027\n\017allocation_t" +
      "ags\030\001 \003(\t\022C\n\024placement_constraint\030\002 \001(\0132" +
      "%.hadoop.yarn.PlacementConstraintProto\";" +
      "\n\022ReservationIdProto\022\n\n\002id\030\001 \001(\003\022\031\n\021clus" +
      "ter_timestamp\030\002 \001(\003\"\222\001\n\027ReservationReque" +
      "stProto\022.\n\ncapability\030\001 \001(\0132\032.hadoop.yar" +
      "n.ResourceProto\022\031\n\016num_containers\030\002 \001(\005:" +
      "\0011\022\026\n\013concurrency\030\003 \001(\005:\0011\022\024\n\010duration\030\004" +
      " \001(\003:\002-1\"\254\001\n\030ReservationRequestsProto\022C\n" +
      "\025reservation_resources\030\001 \003(\0132$.hadoop.ya" +
      "rn.ReservationRequestProto\022K\n\013interprete" +
      "r\030\002 \001(\0162/.hadoop.yarn.ReservationRequest" +
      "InterpreterProto:\005R_ALL\"\356\001\n\032ReservationD" +
      "efinitionProto\022C\n\024reservation_requests\030\001" +
      " \001(\0132%.hadoop.yarn.ReservationRequestsPr" +
      "oto\022\017\n\007arrival\030\002 \001(\003\022\020\n\010deadline\030\003 \001(\003\022\030" +
      "\n\020reservation_name\030\004 \001(\t\022 \n\025recurrence_e" +
      "xpression\030\005 \001(\t:\0010\022,\n\010priority\030\006 \001(\0132\032.h" +
      "adoop.yarn.PriorityProto\"t\n\036ResourceAllo" +
      "cationRequestProto\022\022\n\nstart_time\030\001 \001(\003\022\020" +
      "\n\010end_time\030\002 \001(\003\022,\n\010resource\030\003 \001(\0132\032.had" +
      "oop.yarn.ResourceProto\"\322\002\n\037ReservationAl" +
      "locationStateProto\022G\n\026reservation_defini" +
      "tion\030\001 \001(\0132\'.hadoop.yarn.ReservationDefi" +
      "nitionProto\022H\n\023allocation_requests\030\002 \003(\013" +
      "2+.hadoop.yarn.ResourceAllocationRequest" +
      "Proto\022\022\n\nstart_time\030\003 \001(\003\022\020\n\010end_time\030\004 " +
      "\001(\003\022\014\n\004user\030\005 \001(\t\022\026\n\016contains_gangs\030\006 \001(" +
      "\010\022\027\n\017acceptance_time\030\007 \001(\003\0227\n\016reservatio" +
      "n_id\030\010 \001(\0132\037.hadoop.yarn.ReservationIdPr" +
      "oto\"\216\003\n\033ContainerLaunchContextProto\022@\n\016l" +
      "ocalResources\030\001 \003(\0132(.hadoop.yarn.String" +
      "LocalResourceMapProto\022\016\n\006tokens\030\002 \001(\014\0226\n" +
      "\014service_data\030\003 \003(\0132 .hadoop.yarn.String" +
      "BytesMapProto\0226\n\013environment\030\004 \003(\0132!.had" +
      "oop.yarn.StringStringMapProto\022\017\n\007command" +
      "\030\005 \003(\t\022=\n\020application_ACLs\030\006 \003(\0132#.hadoo" +
      "p.yarn.ApplicationACLMapProto\022H\n\027contain" +
      "er_retry_context\030\007 \001(\0132\'.hadoop.yarn.Con" +
      "tainerRetryContextProto\022\023\n\013tokens_conf\030\010" +
      " \001(\014\"\251\003\n\024ContainerStatusProto\0223\n\014contain" +
      "er_id\030\001 \001(\0132\035.hadoop.yarn.ContainerIdPro" +
      "to\022/\n\005state\030\002 \001(\0162 .hadoop.yarn.Containe" +
      "rStateProto\022\030\n\013diagnostics\030\003 \001(\t:\003N/A\022\032\n" +
      "\013exit_status\030\004 \001(\005:\005-1000\022.\n\ncapability\030" +
      "\005 \001(\0132\032.hadoop.yarn.ResourceProto\022B\n\rexe" +
      "cutionType\030\006 \001(\0162\037.hadoop.yarn.Execution" +
      "TypeProto:\nGUARANTEED\022?\n\024container_attri" +
      "butes\030\007 \003(\0132!.hadoop.yarn.StringStringMa" +
      "pProto\022@\n\023container_sub_state\030\010 \001(\0162#.ha" +
      "doop.yarn.ContainerSubStateProto\"\327\001\n\032Con",
      "tainerRetryContextProto\022I\n\014retry_policy\030" +
      "\001 \001(\0162&.hadoop.yarn.ContainerRetryPolicy" +
      "Proto:\013NEVER_RETRY\022\023\n\013error_codes\030\002 \003(\005\022" +
      "\026\n\013max_retries\030\003 \001(\005:\0010\022\031\n\016retry_interva" +
      "l\030\004 \001(\005:\0010\022&\n\032failures_validity_interval" +
      "\030\005 \001(\003:\002-1\"Z\n\033StringLocalResourceMapProt" +
      "o\022\013\n\003key\030\001 \001(\t\022.\n\005value\030\002 \001(\0132\037.hadoop.y" +
      "arn.LocalResourceProto\"2\n\024StringStringMa" +
      "pProto\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030\002 \001(\t\"1\n\023St" +
      "ringBytesMapProto\022\013\n\003key\030\001 \001(\t\022\r\n\005value\030" +
      "\002 \001(\014\"`\n\022CollectorInfoProto\022\026\n\016collector" +
      "_addr\030\001 \001(\t\0222\n\017collector_token\030\002 \001(\0132\031.h" +
      "adoop.common.TokenProto*#\n\022ResourceTypes" +
      "Proto\022\r\n\tCOUNTABLE\020\000*?\n\023ContainerStatePr" +
      "oto\022\t\n\005C_NEW\020\001\022\r\n\tC_RUNNING\020\002\022\016\n\nC_COMPL" +
      "ETE\020\003*n\n\026ContainerSubStateProto\022\021\n\rCSS_S" +
      "CHEDULED\020\001\022\017\n\013CSS_RUNNING\020\002\022\016\n\nCSS_PAUSE" +
      "D\020\003\022\022\n\016CSS_COMPLETING\020\004\022\014\n\010CSS_DONE\020\005*\204\001" +
      "\n\031YarnApplicationStateProto\022\007\n\003NEW\020\001\022\016\n\n" +
      "NEW_SAVING\020\002\022\r\n\tSUBMITTED\020\003\022\014\n\010ACCEPTED\020" +
      "\004\022\013\n\007RUNNING\020\005\022\014\n\010FINISHED\020\006\022\n\n\006FAILED\020\007" +
      "\022\n\n\006KILLED\020\010*\302\002\n YarnApplicationAttemptS" +
      "tateProto\022\023\n\017APP_ATTEMPT_NEW\020\001\022\031\n\025APP_AT" +
      "TEMPT_SUBMITTED\020\002\022\031\n\025APP_ATTEMPT_SCHEDUL" +
      "ED\020\003\022 \n\034APP_ATTEMPT_ALLOCATED_SAVING\020\004\022\031" +
      "\n\025APP_ATTEMPT_ALLOCATED\020\005\022\030\n\024APP_ATTEMPT" +
      "_LAUNCHED\020\006\022\026\n\022APP_ATTEMPT_FAILED\020\007\022\027\n\023A" +
      "PP_ATTEMPT_RUNNING\020\010\022\031\n\025APP_ATTEMPT_FINI" +
      "SHING\020\t\022\030\n\024APP_ATTEMPT_FINISHED\020\n\022\026\n\022APP" +
      "_ATTEMPT_KILLED\020\013*r\n\033FinalApplicationSta" +
      "tusProto\022\021\n\rAPP_UNDEFINED\020\000\022\021\n\rAPP_SUCCE" +
      "EDED\020\001\022\016\n\nAPP_FAILED\020\002\022\016\n\nAPP_KILLED\020\003\022\r" +
      "\n\tAPP_ENDED\020\004*H\n\034LocalResourceVisibility" +
      "Proto\022\n\n\006PUBLIC\020\001\022\013\n\007PRIVATE\020\002\022\017\n\013APPLIC" +
      "ATION\020\003*<\n\026LocalResourceTypeProto\022\013\n\007ARC" +
      "HIVE\020\001\022\010\n\004FILE\020\002\022\013\n\007PATTERN\020\003*\244\001\n\031LogAgg" +
      "regationStatusProto\022\020\n\014LOG_DISABLED\020\001\022\021\n" +
      "\rLOG_NOT_START\020\002\022\017\n\013LOG_RUNNING\020\003\022\021\n\rLOG" +
      "_SUCCEEDED\020\004\022\016\n\nLOG_FAILED\020\005\022\020\n\014LOG_TIME" +
      "_OUT\020\006\022\034\n\030LOG_RUNNING_WITH_FAILURE\020\007*\234\001\n" +
      "\016NodeStateProto\022\n\n\006NS_NEW\020\001\022\016\n\nNS_RUNNIN" +
      "G\020\002\022\020\n\014NS_UNHEALTHY\020\003\022\025\n\021NS_DECOMMISSION" +
      "ED\020\004\022\013\n\007NS_LOST\020\005\022\017\n\013NS_REBOOTED\020\006\022\026\n\022NS" +
      "_DECOMMISSIONING\020\007\022\017\n\013NS_SHUTDOWN\020\010*S\n\023N" +
      "odeUpdateTypeProto\022\017\n\013NODE_USABLE\020\000\022\021\n\rN" +
      "ODE_UNUSABLE\020\001\022\030\n\024NODE_DECOMMISSIONING\020\002" +
      "*$\n\026NodeAttributeTypeProto\022\n\n\006STRING\020\001*6" +
      "\n\022ContainerTypeProto\022\026\n\022APPLICATION_MAST" +
      "ER\020\001\022\010\n\004TASK\020\002*7\n\022ExecutionTypeProto\022\016\n\n" +
      "GUARANTEED\020\001\022\021\n\rOPPORTUNISTIC\020\002*0\n\016AMCom" +
      "mandProto\022\r\n\tAM_RESYNC\020\001\022\017\n\013AM_SHUTDOWN\020" +
      "\002*[\n\024RejectionReasonProto\022\037\n\033RRP_COULD_N" +
      "OT_PLACE_ON_NODE\020\001\022\"\n\036RRP_COULD_NOT_SCHE" +
      "DULE_ON_NODE\020\002*7\n\033ApplicationTimeoutType" +
      "Proto\022\030\n\024APP_TIMEOUT_LIFETIME\020\001*N\n\032Appli" +
      "cationAccessTypeProto\022\026\n\022APPACCESS_VIEW_" +
      "APP\020\001\022\030\n\024APPACCESS_MODIFY_APP\020\002*?\n\017Queue" +
      "StateProto\022\r\n\tQ_STOPPED\020\001\022\r\n\tQ_RUNNING\020\002" +
      "\022\016\n\nQ_DRAINING\020\003*H\n\rQueueACLProto\022\034\n\030QAC" +
      "L_SUBMIT_APPLICATIONS\020\001\022\031\n\025QACL_ADMINIST" +
      "ER_QUEUE\020\002*c\n\033SignalContainerCommandProt" +
      "o\022\026\n\022OUTPUT_THREAD_DUMP\020\001\022\025\n\021GRACEFUL_SH" +
      "UTDOWN\020\002\022\025\n\021FORCEFUL_SHUTDOWN\020\003*5\n\030NodeA" +
      "ttributeOpCodeProto\022\t\n\005NO_OP\020\001\022\006\n\002EQ\020\002\022\006" +
      "\n\002NE\020\003*[\n\"ReservationRequestInterpreterP" +
      "roto\022\t\n\005R_ANY\020\000\022\t\n\005R_ALL\020\001\022\013\n\007R_ORDER\020\002\022" +
      "\022\n\016R_ORDER_NO_GAP\020\003*n\n\030ContainerExitStat" +
      "usProto\022\013\n\007SUCCESS\020\000\022\024\n\007INVALID\020\230\370\377\377\377\377\377\377" +
      "\377\001\022\024\n\007ABORTED\020\234\377\377\377\377\377\377\377\377\001\022\031\n\014DISKS_FAILED" +
      "\020\233\377\377\377\377\377\377\377\377\001*h\n\031ContainerRetryPolicyProto" +
      "\022\017\n\013NEVER_RETRY\020\000\022\027\n\023RETRY_ON_ALL_ERRORS" +
      "\020\001\022!\n\035RETRY_ON_SPECIFIC_ERROR_CODES\020\002B0\n" +
      "\034org.apache.hadoop.yarn.protoB\nYarnProto" +
      "s\210\001\001\240\001\001"
    };
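    // descriptorData above is the serialized FileDescriptorProto for
    // yarn_protos.proto; it is parsed here with SecurityProtos supplied as the
    // file's only declared dependency.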
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(),
        });
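    // Each message's Descriptor is looked up by its index in this file's
    // message-type list, and a FieldAccessorTable is built mapping the listed
    // field names to the reflective accessors used by GeneratedMessageV3.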
    internal_static_hadoop_yarn_SerializedExceptionProto_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_SerializedExceptionProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SerializedExceptionProto_descriptor,
        new java.lang.String[] { "Message", "Trace", "ClassName", "Cause", });
    internal_static_hadoop_yarn_ApplicationIdProto_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_ApplicationIdProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationIdProto_descriptor,
        new java.lang.String[] { "Id", "ClusterTimestamp", });
    internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_ApplicationAttemptIdProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationAttemptIdProto_descriptor,
        new java.lang.String[] { "ApplicationId", "AttemptId", });
    internal_static_hadoop_yarn_ContainerIdProto_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_ContainerIdProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerIdProto_descriptor,
        new java.lang.String[] { "AppId", "AppAttemptId", "Id", });
    internal_static_hadoop_yarn_ResourceInformationProto_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_ResourceInformationProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceInformationProto_descriptor,
        new java.lang.String[] { "Key", "Value", "Units", "Type", "Tags", "Attributes", });
    internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(5);
    internal_static_hadoop_yarn_ResourceTypeInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceTypeInfoProto_descriptor,
        new java.lang.String[] { "Name", "Units", "Type", });
    internal_static_hadoop_yarn_ResourceProto_descriptor =
      getDescriptor().getMessageTypes().get(6);
    internal_static_hadoop_yarn_ResourceProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceProto_descriptor,
        new java.lang.String[] { "Memory", "VirtualCores", "ResourceValueMap", });
    internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor =
      getDescriptor().getMessageTypes().get(7);
    internal_static_hadoop_yarn_ResourceUtilizationProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceUtilizationProto_descriptor,
        new java.lang.String[] { "Pmem", "Vmem", "Cpu", "CustomResources", });
    internal_static_hadoop_yarn_ResourceOptionProto_descriptor =
      getDescriptor().getMessageTypes().get(8);
    internal_static_hadoop_yarn_ResourceOptionProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceOptionProto_descriptor,
        new java.lang.String[] { "Resource", "OverCommitTimeout", });
    internal_static_hadoop_yarn_ResourceProfileEntry_descriptor =
      getDescriptor().getMessageTypes().get(9);
    internal_static_hadoop_yarn_ResourceProfileEntry_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceProfileEntry_descriptor,
        new java.lang.String[] { "Name", "Resources", });
    internal_static_hadoop_yarn_ResourceProfilesProto_descriptor =
      getDescriptor().getMessageTypes().get(10);
    internal_static_hadoop_yarn_ResourceProfilesProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceProfilesProto_descriptor,
        new java.lang.String[] { "ResourceProfilesMap", });
    internal_static_hadoop_yarn_NodeResourceMapProto_descriptor =
      getDescriptor().getMessageTypes().get(11);
    internal_static_hadoop_yarn_NodeResourceMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeResourceMapProto_descriptor,
        new java.lang.String[] { "NodeId", "ResourceOption", });
    internal_static_hadoop_yarn_PriorityProto_descriptor =
      getDescriptor().getMessageTypes().get(12);
    internal_static_hadoop_yarn_PriorityProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PriorityProto_descriptor,
        new java.lang.String[] { "Priority", });
    internal_static_hadoop_yarn_ContainerProto_descriptor =
      getDescriptor().getMessageTypes().get(13);
    internal_static_hadoop_yarn_ContainerProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerProto_descriptor,
        new java.lang.String[] { "Id", "NodeId", "NodeHttpAddress", "Resource", "Priority", "ContainerToken", "ExecutionType", "AllocationRequestId", "Version", "AllocationTags", "ExposedPorts", });
    internal_static_hadoop_yarn_ContainerReportProto_descriptor =
      getDescriptor().getMessageTypes().get(14);
    internal_static_hadoop_yarn_ContainerReportProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerReportProto_descriptor,
        new java.lang.String[] { "ContainerId", "Resource", "NodeId", "Priority", "CreationTime", "FinishTime", "DiagnosticsInfo", "LogUrl", "ContainerExitStatus", "ContainerState", "NodeHttpAddress", "ExecutionType", "ExposedPorts", });
    internal_static_hadoop_yarn_URLProto_descriptor =
      getDescriptor().getMessageTypes().get(15);
    internal_static_hadoop_yarn_URLProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_URLProto_descriptor,
        new java.lang.String[] { "Scheme", "Host", "Port", "File", "UserInfo", });
    internal_static_hadoop_yarn_LocalResourceProto_descriptor =
      getDescriptor().getMessageTypes().get(16);
    internal_static_hadoop_yarn_LocalResourceProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LocalResourceProto_descriptor,
        new java.lang.String[] { "Resource", "Size", "Timestamp", "Type", "Visibility", "Pattern", "ShouldBeUploadedToSharedCache", });
    internal_static_hadoop_yarn_StringLongMapProto_descriptor =
      getDescriptor().getMessageTypes().get(17);
    internal_static_hadoop_yarn_StringLongMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringLongMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_StringFloatMapProto_descriptor =
      getDescriptor().getMessageTypes().get(18);
    internal_static_hadoop_yarn_StringFloatMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringFloatMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor =
      getDescriptor().getMessageTypes().get(19);
    internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationResourceUsageReportProto_descriptor,
        new java.lang.String[] { "NumUsedContainers", "NumReservedContainers", "UsedResources", "ReservedResources", "NeededResources", "MemorySeconds", "VcoreSeconds", "QueueUsagePercentage", "ClusterUsagePercentage", "PreemptedMemorySeconds", "PreemptedVcoreSeconds", "ApplicationResourceUsageMap", "ApplicationPreemptedResourceUsageMap", });
    internal_static_hadoop_yarn_ApplicationReportProto_descriptor =
      getDescriptor().getMessageTypes().get(20);
    internal_static_hadoop_yarn_ApplicationReportProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationReportProto_descriptor,
        new java.lang.String[] { "ApplicationId", "User", "Queue", "Name", "Host", "RpcPort", "ClientToAmToken", "YarnApplicationState", "TrackingUrl", "Diagnostics", "StartTime", "FinishTime", "FinalApplicationStatus", "AppResourceUsage", "OriginalTrackingUrl", "CurrentApplicationAttemptId", "Progress", "ApplicationType", "AmRmToken", "ApplicationTags", "LogAggregationStatus", "UnmanagedApplication", "Priority", "AppNodeLabelExpression", "AmNodeLabelExpression", "AppTimeouts", "LaunchTime", "SubmitTime", "RmClusterId", });
    internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor =
      getDescriptor().getMessageTypes().get(21);
    internal_static_hadoop_yarn_AppTimeoutsMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_AppTimeoutsMapProto_descriptor,
        new java.lang.String[] { "ApplicationTimeoutType", "ApplicationTimeout", });
    internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor =
      getDescriptor().getMessageTypes().get(22);
    internal_static_hadoop_yarn_ApplicationTimeoutProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationTimeoutProto_descriptor,
        new java.lang.String[] { "ApplicationTimeoutType", "ExpireTime", "RemainingTime", });
    internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor =
      getDescriptor().getMessageTypes().get(23);
    internal_static_hadoop_yarn_ApplicationAttemptReportProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationAttemptReportProto_descriptor,
        new java.lang.String[] { "ApplicationAttemptId", "Host", "RpcPort", "TrackingUrl", "Diagnostics", "YarnApplicationAttemptState", "AmContainerId", "OriginalTrackingUrl", "StartTime", "FinishTime", });
    internal_static_hadoop_yarn_NodeIdProto_descriptor =
      getDescriptor().getMessageTypes().get(24);
    internal_static_hadoop_yarn_NodeIdProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeIdProto_descriptor,
        new java.lang.String[] { "Host", "Port", });
    internal_static_hadoop_yarn_NodeReportProto_descriptor =
      getDescriptor().getMessageTypes().get(25);
    internal_static_hadoop_yarn_NodeReportProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeReportProto_descriptor,
        new java.lang.String[] { "NodeId", "HttpAddress", "RackName", "Used", "Capability", "NumContainers", "NodeState", "HealthReport", "LastHealthReportTime", "NodeLabels", "ContainersUtilization", "NodeUtilization", "DecommissioningTimeout", "NodeUpdateType", "NodeAttributes", });
    internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor =
      getDescriptor().getMessageTypes().get(26);
    internal_static_hadoop_yarn_NodeIdToLabelsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeIdToLabelsProto_descriptor,
        new java.lang.String[] { "NodeId", "NodeLabels", });
    internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor =
      getDescriptor().getMessageTypes().get(27);
    internal_static_hadoop_yarn_LabelsToNodeIdsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LabelsToNodeIdsProto_descriptor,
        new java.lang.String[] { "NodeLabels", "NodeId", });
    internal_static_hadoop_yarn_NodeLabelProto_descriptor =
      getDescriptor().getMessageTypes().get(28);
    internal_static_hadoop_yarn_NodeLabelProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeLabelProto_descriptor,
        new java.lang.String[] { "Name", "IsExclusive", });
    internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor =
      getDescriptor().getMessageTypes().get(29);
    internal_static_hadoop_yarn_NodeAttributeKeyProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeAttributeKeyProto_descriptor,
        new java.lang.String[] { "AttributePrefix", "AttributeName", });
    internal_static_hadoop_yarn_NodeAttributeProto_descriptor =
      getDescriptor().getMessageTypes().get(30);
    internal_static_hadoop_yarn_NodeAttributeProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeAttributeProto_descriptor,
        new java.lang.String[] { "AttributeKey", "AttributeType", "AttributeValue", });
    internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(31);
    internal_static_hadoop_yarn_NodeAttributeInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeAttributeInfoProto_descriptor,
        new java.lang.String[] { "AttributeKey", "AttributeType", });
    internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor =
      getDescriptor().getMessageTypes().get(32);
    internal_static_hadoop_yarn_NodeToAttributeValueProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeToAttributeValueProto_descriptor,
        new java.lang.String[] { "Hostname", "AttributeValue", });
    internal_static_hadoop_yarn_AttributeToNodesProto_descriptor =
      getDescriptor().getMessageTypes().get(33);
    internal_static_hadoop_yarn_AttributeToNodesProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_AttributeToNodesProto_descriptor,
        new java.lang.String[] { "NodeAttribute", "NodeValueMap", });
    internal_static_hadoop_yarn_NodeToAttributesProto_descriptor =
      getDescriptor().getMessageTypes().get(34);
    internal_static_hadoop_yarn_NodeToAttributesProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeToAttributesProto_descriptor,
        new java.lang.String[] { "Node", "NodeAttributes", });
    internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor =
      getDescriptor().getMessageTypes().get(35);
    internal_static_hadoop_yarn_DeregisterSubClustersProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_DeregisterSubClustersProto_descriptor,
        new java.lang.String[] { "SubClusterId", "DeregisterState", "LastHeartBeatTime", "Information", "SubClusterState", });
    internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor =
      getDescriptor().getMessageTypes().get(36);
    internal_static_hadoop_yarn_FederationQueueWeightProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FederationQueueWeightProto_descriptor,
        new java.lang.String[] { "RouterWeight", "AmrmWeight", "HeadRoomAlpha", "Queue", "PolicyManagerClassName", });
    internal_static_hadoop_yarn_FederationSubClusterProto_descriptor =
      getDescriptor().getMessageTypes().get(37);
    internal_static_hadoop_yarn_FederationSubClusterProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FederationSubClusterProto_descriptor,
        new java.lang.String[] { "SubClusterId", "LastHeartBeatTime", "SubClusterState", });
    internal_static_hadoop_yarn_ResourceRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(38);
    internal_static_hadoop_yarn_ResourceRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceRequestProto_descriptor,
        new java.lang.String[] { "Priority", "ResourceName", "Capability", "NumContainers", "RelaxLocality", "NodeLabelExpression", "ExecutionTypeRequest", "AllocationRequestId", });
    internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(39);
    internal_static_hadoop_yarn_ExecutionTypeRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ExecutionTypeRequestProto_descriptor,
        new java.lang.String[] { "ExecutionType", "EnforceExecutionType", });
    internal_static_hadoop_yarn_SchedulingRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(40);
    internal_static_hadoop_yarn_SchedulingRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SchedulingRequestProto_descriptor,
        new java.lang.String[] { "AllocationRequestId", "Priority", "ExecutionType", "AllocationTags", "ResourceSizing", "PlacementConstraint", });
    internal_static_hadoop_yarn_ResourceSizingProto_descriptor =
      getDescriptor().getMessageTypes().get(41);
    internal_static_hadoop_yarn_ResourceSizingProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceSizingProto_descriptor,
        new java.lang.String[] { "NumAllocations", "Resources", });
    internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(42);
    internal_static_hadoop_yarn_RejectedSchedulingRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_RejectedSchedulingRequestProto_descriptor,
        new java.lang.String[] { "Reason", "Request", });
    internal_static_hadoop_yarn_PreemptionMessageProto_descriptor =
      getDescriptor().getMessageTypes().get(43);
    internal_static_hadoop_yarn_PreemptionMessageProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PreemptionMessageProto_descriptor,
        new java.lang.String[] { "StrictContract", "Contract", });
    internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor =
      getDescriptor().getMessageTypes().get(44);
    internal_static_hadoop_yarn_StrictPreemptionContractProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StrictPreemptionContractProto_descriptor,
        new java.lang.String[] { "Container", });
    internal_static_hadoop_yarn_PreemptionContractProto_descriptor =
      getDescriptor().getMessageTypes().get(45);
    internal_static_hadoop_yarn_PreemptionContractProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PreemptionContractProto_descriptor,
        new java.lang.String[] { "Resource", "Container", });
    internal_static_hadoop_yarn_PreemptionContainerProto_descriptor =
      getDescriptor().getMessageTypes().get(46);
    internal_static_hadoop_yarn_PreemptionContainerProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PreemptionContainerProto_descriptor,
        new java.lang.String[] { "Id", });
    internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(47);
    internal_static_hadoop_yarn_PreemptionResourceRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PreemptionResourceRequestProto_descriptor,
        new java.lang.String[] { "Resource", });
    internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(48);
    internal_static_hadoop_yarn_ResourceBlacklistRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceBlacklistRequestProto_descriptor,
        new java.lang.String[] { "BlacklistAdditions", "BlacklistRemovals", });
    internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor =
      getDescriptor().getMessageTypes().get(49);
    internal_static_hadoop_yarn_ApplicationSubmissionContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationSubmissionContextProto_descriptor,
        new java.lang.String[] { "ApplicationId", "ApplicationName", "Queue", "Priority", "AmContainerSpec", "CancelTokensWhenComplete", "UnmanagedAm", "MaxAppAttempts", "Resource", "ApplicationType", "KeepContainersAcrossApplicationAttempts", "ApplicationTags", "AttemptFailuresValidityInterval", "LogAggregationContext", "ReservationId", "NodeLabelExpression", "AmContainerResourceRequest", "ApplicationTimeouts", "ApplicationSchedulingProperties", });
    internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor =
      getDescriptor().getMessageTypes().get(50);
    internal_static_hadoop_yarn_ApplicationTimeoutMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationTimeoutMapProto_descriptor,
        new java.lang.String[] { "ApplicationTimeoutType", "Timeout", });
    internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor =
      getDescriptor().getMessageTypes().get(51);
    internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationUpdateTimeoutMapProto_descriptor,
        new java.lang.String[] { "ApplicationTimeoutType", "ExpireTime", });
    internal_static_hadoop_yarn_LogAggregationContextProto_descriptor =
      getDescriptor().getMessageTypes().get(52);
    internal_static_hadoop_yarn_LogAggregationContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LogAggregationContextProto_descriptor,
        new java.lang.String[] { "IncludePattern", "ExcludePattern", "RolledLogsIncludePattern", "RolledLogsExcludePattern", "LogAggregationPolicyClassName", "LogAggregationPolicyParameters", });
    internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor =
      getDescriptor().getMessageTypes().get(53);
    internal_static_hadoop_yarn_ApplicationACLMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ApplicationACLMapProto_descriptor,
        new java.lang.String[] { "AccessType", "Acl", });
    internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor =
      getDescriptor().getMessageTypes().get(54);
    internal_static_hadoop_yarn_YarnClusterMetricsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_YarnClusterMetricsProto_descriptor,
        new java.lang.String[] { "NumNodeManagers", "NumDecommissionedNms", "NumActiveNms", "NumLostNms", "NumUnhealthyNms", "NumRebootedNms", "NumDecommissioningNms", "NumShutdownNms", });
    internal_static_hadoop_yarn_QueueStatisticsProto_descriptor =
      getDescriptor().getMessageTypes().get(55);
    internal_static_hadoop_yarn_QueueStatisticsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_QueueStatisticsProto_descriptor,
        new java.lang.String[] { "NumAppsSubmitted", "NumAppsRunning", "NumAppsPending", "NumAppsCompleted", "NumAppsKilled", "NumAppsFailed", "NumActiveUsers", "AvailableMemoryMB", "AllocatedMemoryMB", "PendingMemoryMB", "ReservedMemoryMB", "AvailableVCores", "AllocatedVCores", "PendingVCores", "ReservedVCores", "AllocatedContainers", "PendingContainers", "ReservedContainers", });
    internal_static_hadoop_yarn_QueueInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(56);
    internal_static_hadoop_yarn_QueueInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_QueueInfoProto_descriptor,
        new java.lang.String[] { "QueueName", "Capacity", "MaximumCapacity", "CurrentCapacity", "State", "ChildQueues", "Applications", "AccessibleNodeLabels", "DefaultNodeLabelExpression", "QueueStatistics", "PreemptionDisabled", "QueueConfigurationsMap", "IntraQueuePreemptionDisabled", "Weight", "QueuePath", "MaxParallelApps", "SchedulerType", "MinResourceVCore", "MinResourceMemory", "MaxResourceVCore", "MaxResourceMemory", "ReservedResourceVCore", "ReservedResourceMemory", "SteadyFairShareVCore", "SteadyFairShareMemory", "SubClusterId", "MaxRunningApp", });
    internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor =
      getDescriptor().getMessageTypes().get(57);
    internal_static_hadoop_yarn_QueueConfigurationsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_QueueConfigurationsProto_descriptor,
        new java.lang.String[] { "Capacity", "AbsoluteCapacity", "MaxCapacity", "AbsoluteMaxCapacity", "MaxAMPercentage", "EffectiveMinCapacity", "EffectiveMaxCapacity", "ConfiguredMinCapacity", "ConfiguredMaxCapacity", });
    internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor =
      getDescriptor().getMessageTypes().get(58);
    internal_static_hadoop_yarn_QueueConfigurationsMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_QueueConfigurationsMapProto_descriptor,
        new java.lang.String[] { "PartitionName", "QueueConfigurations", });
    internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(59);
    internal_static_hadoop_yarn_QueueUserACLInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_QueueUserACLInfoProto_descriptor,
        new java.lang.String[] { "QueueName", "UserAcls", });
    internal_static_hadoop_yarn_PlacementConstraintProto_descriptor =
      getDescriptor().getMessageTypes().get(60);
    internal_static_hadoop_yarn_PlacementConstraintProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PlacementConstraintProto_descriptor,
        new java.lang.String[] { "SimpleConstraint", "CompositeConstraint", });
    internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor =
      getDescriptor().getMessageTypes().get(61);
    internal_static_hadoop_yarn_SimplePlacementConstraintProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_SimplePlacementConstraintProto_descriptor,
        new java.lang.String[] { "Scope", "TargetExpressions", "MinCardinality", "MaxCardinality", "AttributeOpCode", });
    internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor =
      getDescriptor().getMessageTypes().get(62);
    internal_static_hadoop_yarn_PlacementConstraintTargetProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PlacementConstraintTargetProto_descriptor,
        new java.lang.String[] { "TargetType", "TargetKey", "TargetValues", });
    internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor =
      getDescriptor().getMessageTypes().get(63);
    internal_static_hadoop_yarn_TimedPlacementConstraintProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_TimedPlacementConstraintProto_descriptor,
        new java.lang.String[] { "PlacementConstraint", "SchedulingDelay", "DelayUnit", });
    internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor =
      getDescriptor().getMessageTypes().get(64);
    internal_static_hadoop_yarn_CompositePlacementConstraintProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_CompositePlacementConstraintProto_descriptor,
        new java.lang.String[] { "CompositeType", "ChildConstraints", "TimedChildConstraints", });
    internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor =
      getDescriptor().getMessageTypes().get(65);
    internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_PlacementConstraintMapEntryProto_descriptor,
        new java.lang.String[] { "AllocationTags", "PlacementConstraint", });
    internal_static_hadoop_yarn_ReservationIdProto_descriptor =
      getDescriptor().getMessageTypes().get(66);
    internal_static_hadoop_yarn_ReservationIdProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationIdProto_descriptor,
        new java.lang.String[] { "Id", "ClusterTimestamp", });
    internal_static_hadoop_yarn_ReservationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(67);
    internal_static_hadoop_yarn_ReservationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationRequestProto_descriptor,
        new java.lang.String[] { "Capability", "NumContainers", "Concurrency", "Duration", });
    internal_static_hadoop_yarn_ReservationRequestsProto_descriptor =
      getDescriptor().getMessageTypes().get(68);
    internal_static_hadoop_yarn_ReservationRequestsProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationRequestsProto_descriptor,
        new java.lang.String[] { "ReservationResources", "Interpreter", });
    internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor =
      getDescriptor().getMessageTypes().get(69);
    internal_static_hadoop_yarn_ReservationDefinitionProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationDefinitionProto_descriptor,
        new java.lang.String[] { "ReservationRequests", "Arrival", "Deadline", "ReservationName", "RecurrenceExpression", "Priority", });
    internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor =
      getDescriptor().getMessageTypes().get(70);
    internal_static_hadoop_yarn_ResourceAllocationRequestProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ResourceAllocationRequestProto_descriptor,
        new java.lang.String[] { "StartTime", "EndTime", "Resource", });
    internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor =
      getDescriptor().getMessageTypes().get(71);
    internal_static_hadoop_yarn_ReservationAllocationStateProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ReservationAllocationStateProto_descriptor,
        new java.lang.String[] { "ReservationDefinition", "AllocationRequests", "StartTime", "EndTime", "User", "ContainsGangs", "AcceptanceTime", "ReservationId", });
    internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor =
      getDescriptor().getMessageTypes().get(72);
    internal_static_hadoop_yarn_ContainerLaunchContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerLaunchContextProto_descriptor,
        new java.lang.String[] { "LocalResources", "Tokens", "ServiceData", "Environment", "Command", "ApplicationACLs", "ContainerRetryContext", "TokensConf", });
    internal_static_hadoop_yarn_ContainerStatusProto_descriptor =
      getDescriptor().getMessageTypes().get(73);
    internal_static_hadoop_yarn_ContainerStatusProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerStatusProto_descriptor,
        new java.lang.String[] { "ContainerId", "State", "Diagnostics", "ExitStatus", "Capability", "ExecutionType", "ContainerAttributes", "ContainerSubState", });
    internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor =
      getDescriptor().getMessageTypes().get(74);
    internal_static_hadoop_yarn_ContainerRetryContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerRetryContextProto_descriptor,
        new java.lang.String[] { "RetryPolicy", "ErrorCodes", "MaxRetries", "RetryInterval", "FailuresValidityInterval", });
    internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor =
      getDescriptor().getMessageTypes().get(75);
    internal_static_hadoop_yarn_StringLocalResourceMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringLocalResourceMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_StringStringMapProto_descriptor =
      getDescriptor().getMessageTypes().get(76);
    internal_static_hadoop_yarn_StringStringMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringStringMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_StringBytesMapProto_descriptor =
      getDescriptor().getMessageTypes().get(77);
    internal_static_hadoop_yarn_StringBytesMapProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_StringBytesMapProto_descriptor,
        new java.lang.String[] { "Key", "Value", });
    internal_static_hadoop_yarn_CollectorInfoProto_descriptor =
      getDescriptor().getMessageTypes().get(78);
    internal_static_hadoop_yarn_CollectorInfoProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_CollectorInfoProto_descriptor,
        new java.lang.String[] { "CollectorAddr", "CollectorToken", });
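    // A final reference to the dependency's descriptor; the generated code
    // includes this to ensure SecurityProtos is initialized whenever this
    // class is loaded.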
    org.apache.hadoop.security.proto.SecurityProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}
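// Illustrative usage sketch: the package-private class below is a hypothetical
// helper added for exposition, not part of the protoc-generated output. It
// shows how the descriptors wired up in the static initializer above can be
// inspected reflectively; "ApplicationIdProto" is one of the message types
// registered by this file.
class YarnProtosDescriptorDemo {
  public static void main(String[] args) {
    // Resolve a message descriptor by name from the generated FileDescriptor.
    org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor appId =
        org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor()
            .findMessageTypeByName("ApplicationIdProto");
    // List each field's tag number and proto name as declared in yarn_protos.proto.
    for (org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor f
        : appId.getFields()) {
      System.out.println(f.getNumber() + ": " + f.getName());
    }
  }
}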