YarnServerNodemanagerRecoveryProtos.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_server_nodemanager_recovery.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;

public final class YarnServerNodemanagerRecoveryProtos {
  private YarnServerNodemanagerRecoveryProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
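
  // Both registerAllExtensions overloads above are intentionally empty:
  // yarn_server_nodemanager_recovery.proto declares no extensions.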
  public interface ContainerManagerApplicationProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ContainerManagerApplicationProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     * @return The id.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getId();
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getIdOrBuilder();

    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    boolean hasUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * <code>optional bytes credentials = 3;</code>
     * @return Whether the credentials field is set.
     */
    boolean hasCredentials();
    /**
     * <code>optional bytes credentials = 3;</code>
     * @return The credentials.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString getCredentials();

    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> 
        getAclsList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getAcls(int index);
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    int getAclsCount();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getAclsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getAclsOrBuilder(
        int index);

    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     * @return Whether the logAggregationContext field is set.
     */
    boolean hasLogAggregationContext();
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     * @return The logAggregationContext.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext();
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder();

    /**
     * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
     * @return Whether the appLogAggregationInitedTime field is set.
     */
    boolean hasAppLogAggregationInitedTime();
    /**
     * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
     * @return The appLogAggregationInitedTime.
     */
    long getAppLogAggregationInitedTime();

    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     * @return Whether the flowContext field is set.
     */
    boolean hasFlowContext();
    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     * @return The flowContext.
     */
    org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getFlowContext();
    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder getFlowContextOrBuilder();
  }
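  // Illustrative sketch (placeholder values, not generated output): building,
  // serializing, and re-parsing this message, e.g. when persisting an
  // application to the NodeManager recovery state store.
  //
  //   ContainerManagerApplicationProto proto =
  //       ContainerManagerApplicationProto.newBuilder()
  //           .setUser("alice")
  //           .setAppLogAggregationInitedTime(System.currentTimeMillis())
  //           .build();
  //   byte[] bytes = proto.toByteArray();
  //   ContainerManagerApplicationProto restored =
  //       ContainerManagerApplicationProto.parseFrom(bytes);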
  /**
   * Protobuf type {@code hadoop.yarn.ContainerManagerApplicationProto}
   */
  public static final class ContainerManagerApplicationProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ContainerManagerApplicationProto)
      ContainerManagerApplicationProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ContainerManagerApplicationProto.newBuilder() to construct.
    private ContainerManagerApplicationProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ContainerManagerApplicationProto() {
      user_ = "";
      credentials_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      acls_ = java.util.Collections.emptyList();
      appLogAggregationInitedTime_ = -1L;
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ContainerManagerApplicationProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_ContainerManagerApplicationProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.Builder.class);
    }

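    // bitField0_ tracks explicit presence of the optional fields:
    // bit 0 = id, bit 1 = user, bit 2 = credentials,
    // bit 3 = log_aggregation_context, bit 4 = appLogAggregationInitedTime,
    // bit 5 = flowContext.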
    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto id_;
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getId() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : id_;
    }
    /**
     * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getIdOrBuilder() {
      return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : id_;
    }

    public static final int USER_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000002) != 0);
    }
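    // user_ holds either a String or a ByteString. getUser() decodes the
    // bytes lazily and caches the String form only when they are valid UTF-8;
    // getUserBytes() caches the ByteString form after a String has been set.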
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    @java.lang.Override
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int CREDENTIALS_FIELD_NUMBER = 3;
    private org.apache.hadoop.thirdparty.protobuf.ByteString credentials_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
    /**
     * <code>optional bytes credentials = 3;</code>
     * @return Whether the credentials field is set.
     */
    @java.lang.Override
    public boolean hasCredentials() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional bytes credentials = 3;</code>
     * @return The credentials.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString getCredentials() {
      return credentials_;
    }

    public static final int ACLS_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> acls_;
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getAclsList() {
      return acls_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
        getAclsOrBuilderList() {
      return acls_;
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    @java.lang.Override
    public int getAclsCount() {
      return acls_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getAcls(int index) {
      return acls_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getAclsOrBuilder(
        int index) {
      return acls_.get(index);
    }

    public static final int LOG_AGGREGATION_CONTEXT_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     * @return Whether the logAggregationContext field is set.
     */
    @java.lang.Override
    public boolean hasLogAggregationContext() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     * @return The logAggregationContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
      return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
    }
    /**
     * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
      return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
    }

    public static final int APPLOGAGGREGATIONINITEDTIME_FIELD_NUMBER = 6;
    private long appLogAggregationInitedTime_ = -1L;
    /**
     * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
     * @return Whether the appLogAggregationInitedTime field is set.
     */
    @java.lang.Override
    public boolean hasAppLogAggregationInitedTime() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
     * @return The appLogAggregationInitedTime.
     */
    @java.lang.Override
    public long getAppLogAggregationInitedTime() {
      return appLogAggregationInitedTime_;
    }

    public static final int FLOWCONTEXT_FIELD_NUMBER = 7;
    private org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto flowContext_;
    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     * @return Whether the flowContext field is set.
     */
    @java.lang.Override
    public boolean hasFlowContext() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     * @return The flowContext.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getFlowContext() {
      return flowContext_ == null ? org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance() : flowContext_;
    }
    /**
     * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder getFlowContextOrBuilder() {
      return flowContext_ == null ? org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance() : flowContext_;
    }

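    // Every field in this message is optional, so isInitialized() always
    // returns true; the byte below merely memoizes that answer
    // (-1 = unknown, 0 = false, 1 = true).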
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

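    // Serialization writes fields in ascending field-number order; the
    // presence bits gate the optional fields, while the repeated acls list
    // is written unconditionally.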
    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeBytes(3, credentials_);
      }
      for (int i = 0; i < acls_.size(); i++) {
        output.writeMessage(4, acls_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(5, getLogAggregationContext());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeInt64(6, appLogAggregationInitedTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        output.writeMessage(7, getFlowContext());
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getId());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBytesSize(3, credentials_);
      }
      for (int i = 0; i < acls_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(4, acls_.get(i));
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getLogAggregationContext());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(6, appLogAggregationInitedTime_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, getFlowContext());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto other = (org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (!getId()
            .equals(other.getId())) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasCredentials() != other.hasCredentials()) return false;
      if (hasCredentials()) {
        if (!getCredentials()
            .equals(other.getCredentials())) return false;
      }
      if (!getAclsList()
          .equals(other.getAclsList())) return false;
      if (hasLogAggregationContext() != other.hasLogAggregationContext()) return false;
      if (hasLogAggregationContext()) {
        if (!getLogAggregationContext()
            .equals(other.getLogAggregationContext())) return false;
      }
      if (hasAppLogAggregationInitedTime() != other.hasAppLogAggregationInitedTime()) return false;
      if (hasAppLogAggregationInitedTime()) {
        if (getAppLogAggregationInitedTime()
            != other.getAppLogAggregationInitedTime()) return false;
      }
      if (hasFlowContext() != other.hasFlowContext()) return false;
      if (hasFlowContext()) {
        if (!getFlowContext()
            .equals(other.getFlowContext())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId().hashCode();
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasCredentials()) {
        hash = (37 * hash) + CREDENTIALS_FIELD_NUMBER;
        hash = (53 * hash) + getCredentials().hashCode();
      }
      if (getAclsCount() > 0) {
        hash = (37 * hash) + ACLS_FIELD_NUMBER;
        hash = (53 * hash) + getAclsList().hashCode();
      }
      if (hasLogAggregationContext()) {
        hash = (37 * hash) + LOG_AGGREGATION_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getLogAggregationContext().hashCode();
      }
      if (hasAppLogAggregationInitedTime()) {
        hash = (37 * hash) + APPLOGAGGREGATIONINITEDTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getAppLogAggregationInitedTime());
      }
      if (hasFlowContext()) {
        hash = (37 * hash) + FLOWCONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getFlowContext().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
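    // parseDelimitedFrom reads a varint length prefix before the message
    // bytes (the counterpart of writeDelimitedTo), so several messages can
    // be read back-to-back from one stream; plain parseFrom treats the
    // entire input as a single message.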
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ContainerManagerApplicationProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ContainerManagerApplicationProto)
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_ContainerManagerApplicationProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getIdFieldBuilder();
          getAclsFieldBuilder();
          getLogAggregationContextFieldBuilder();
          getFlowContextFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        user_ = "";
        credentials_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
        if (aclsBuilder_ == null) {
          acls_ = java.util.Collections.emptyList();
        } else {
          acls_ = null;
          aclsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000008);
        logAggregationContext_ = null;
        if (logAggregationContextBuilder_ != null) {
          logAggregationContextBuilder_.dispose();
          logAggregationContextBuilder_ = null;
        }
        appLogAggregationInitedTime_ = -1L;
        flowContext_ = null;
        if (flowContextBuilder_ != null) {
          flowContextBuilder_.dispose();
          flowContextBuilder_ = null;
        }
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto build() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto result = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto result) {
        if (aclsBuilder_ == null) {
          if (((bitField0_ & 0x00000008) != 0)) {
            acls_ = java.util.Collections.unmodifiableList(acls_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.acls_ = acls_;
        } else {
          result.acls_ = aclsBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = idBuilder_ == null
              ? id_
              : idBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.user_ = user_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.credentials_ = credentials_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.logAggregationContext_ = logAggregationContextBuilder_ == null
              ? logAggregationContext_
              : logAggregationContextBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.appLogAggregationInitedTime_ = appLogAggregationInitedTime_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.flowContext_ = flowContextBuilder_ == null
              ? flowContext_
              : flowContextBuilder_.build();
          to_bitField0_ |= 0x00000020;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          mergeId(other.getId());
        }
        if (other.hasUser()) {
          user_ = other.user_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasCredentials()) {
          setCredentials(other.getCredentials());
        }
        if (aclsBuilder_ == null) {
          if (!other.acls_.isEmpty()) {
            if (acls_.isEmpty()) {
              acls_ = other.acls_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureAclsIsMutable();
              acls_.addAll(other.acls_);
            }
            onChanged();
          }
        } else {
          if (!other.acls_.isEmpty()) {
            if (aclsBuilder_.isEmpty()) {
              aclsBuilder_.dispose();
              aclsBuilder_ = null;
              acls_ = other.acls_;
              bitField0_ = (bitField0_ & ~0x00000008);
              aclsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getAclsFieldBuilder() : null;
            } else {
              aclsBuilder_.addAllMessages(other.acls_);
            }
          }
        }
        if (other.hasLogAggregationContext()) {
          mergeLogAggregationContext(other.getLogAggregationContext());
        }
        if (other.hasAppLogAggregationInitedTime()) {
          setAppLogAggregationInitedTime(other.getAppLogAggregationInitedTime());
        }
        if (other.hasFlowContext()) {
          mergeFlowContext(other.getFlowContext());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
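        // Each wire tag below is (field_number << 3) | wire_type, e.g.
        // tag 10 = field 1 with wire type 2 (length-delimited) and
        // tag 48 = field 6 with wire type 0 (varint).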
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getIdFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                user_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                credentials_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.PARSER,
                        extensionRegistry);
                if (aclsBuilder_ == null) {
                  ensureAclsIsMutable();
                  acls_.add(m);
                } else {
                  aclsBuilder_.addMessage(m);
                }
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getLogAggregationContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                appLogAggregationInitedTime_ = input.readInt64();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 58: {
                input.readMessage(
                    getFlowContextFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
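      // In this Builder, bit 3 of bitField0_ marks the acls list as mutable,
      // so the fields after acls sit one bit higher than in the message
      // class: log_aggregation_context = bit 4, appLogAggregationInitedTime
      // = bit 5, flowContext = bit 6. buildPartial0() remaps them onto the
      // message's layout.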
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto id_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> idBuilder_;
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       * @return Whether the id field is set.
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       * @return The id.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto getId() {
        if (idBuilder_ == null) {
          return id_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : id_;
        } else {
          return idBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public Builder setId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (idBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          id_ = value;
        } else {
          idBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public Builder setId(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder builderForValue) {
        if (idBuilder_ == null) {
          id_ = builderForValue.build();
        } else {
          idBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public Builder mergeId(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto value) {
        if (idBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            id_ != null &&
            id_ != org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance()) {
            getIdBuilder().mergeFrom(value);
          } else {
            id_ = value;
          }
        } else {
          idBuilder_.mergeFrom(value);
        }
        if (id_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = null;
        if (idBuilder_ != null) {
          idBuilder_.dispose();
          idBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder getIdBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getIdFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder getIdOrBuilder() {
        if (idBuilder_ != null) {
          return idBuilder_.getMessageOrBuilder();
        } else {
          return id_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.getDefaultInstance() : id_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.ApplicationIdProto id = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder> 
          getIdFieldBuilder() {
        if (idBuilder_ == null) {
          idBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProtoOrBuilder>(
                  getId(),
                  getParentForChildren(),
                  isClean());
          id_ = null;
        }
        return idBuilder_;
      }

      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 2;</code>
       * @return Whether the user field is set.
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The user.
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The bytes for user.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The user to set.
       * @return This builder for chaining.
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearUser() {
        user_ = getDefaultInstance().getUser();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The bytes for user to set.
       * @return This builder for chaining.
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.ByteString credentials_ = org.apache.hadoop.thirdparty.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes credentials = 3;</code>
       * @return Whether the credentials field is set.
       */
      @java.lang.Override
      public boolean hasCredentials() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional bytes credentials = 3;</code>
       * @return The credentials.
       */
      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.ByteString getCredentials() {
        return credentials_;
      }
      /**
       * <code>optional bytes credentials = 3;</code>
       * @param value The credentials to set.
       * @return This builder for chaining.
       */
      public Builder setCredentials(org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        credentials_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes credentials = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearCredentials() {
        bitField0_ = (bitField0_ & ~0x00000004);
        credentials_ = getDefaultInstance().getCredentials();
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> acls_ =
        java.util.Collections.emptyList();
      private void ensureAclsIsMutable() {
        if (!((bitField0_ & 0x00000008) != 0)) {
          acls_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto>(acls_);
          bitField0_ |= 0x00000008;
        }
      }

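      // acls is backed either by the plain list above or, once a builder view
      // is requested, by the RepeatedFieldBuilderV3 below; exactly one of the
      // two is live at a time (getAclsFieldBuilder() nulls out acls_).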
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> aclsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> getAclsList() {
        if (aclsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(acls_);
        } else {
          return aclsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public int getAclsCount() {
        if (aclsBuilder_ == null) {
          return acls_.size();
        } else {
          return aclsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto getAcls(int index) {
        if (aclsBuilder_ == null) {
          return acls_.get(index);
        } else {
          return aclsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder setAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (aclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAclsIsMutable();
          acls_.set(index, value);
          onChanged();
        } else {
          aclsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder setAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (aclsBuilder_ == null) {
          ensureAclsIsMutable();
          acls_.set(index, builderForValue.build());
          onChanged();
        } else {
          aclsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder addAcls(org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (aclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAclsIsMutable();
          acls_.add(value);
          onChanged();
        } else {
          aclsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder addAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto value) {
        if (aclsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureAclsIsMutable();
          acls_.add(index, value);
          onChanged();
        } else {
          aclsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder addAcls(
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (aclsBuilder_ == null) {
          ensureAclsIsMutable();
          acls_.add(builderForValue.build());
          onChanged();
        } else {
          aclsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder addAcls(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder builderForValue) {
        if (aclsBuilder_ == null) {
          ensureAclsIsMutable();
          acls_.add(index, builderForValue.build());
          onChanged();
        } else {
          aclsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder addAllAcls(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto> values) {
        if (aclsBuilder_ == null) {
          ensureAclsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, acls_);
          onChanged();
        } else {
          aclsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder clearAcls() {
        if (aclsBuilder_ == null) {
          acls_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          aclsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public Builder removeAcls(int index) {
        if (aclsBuilder_ == null) {
          ensureAclsIsMutable();
          acls_.remove(index);
          onChanged();
        } else {
          aclsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder getAclsBuilder(
          int index) {
        return getAclsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder getAclsOrBuilder(
          int index) {
        if (aclsBuilder_ == null) {
          return acls_.get(index);
        } else {
          return aclsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
           getAclsOrBuilderList() {
        if (aclsBuilder_ != null) {
          return aclsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(acls_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addAclsBuilder() {
        return getAclsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder addAclsBuilder(
          int index) {
        return getAclsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.ApplicationACLMapProto acls = 4;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder> 
           getAclsBuilderList() {
        return getAclsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder> 
          getAclsFieldBuilder() {
        if (aclsBuilder_ == null) {
          aclsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.ApplicationACLMapProtoOrBuilder>(
                  acls_,
                  ((bitField0_ & 0x00000008) != 0),
                  getParentForChildren(),
                  isClean());
          acls_ = null;
        }
        return aclsBuilder_;
      }
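      // Note on the pattern above: the repeated "acls" field starts out as a
      // plain java.util.List and is migrated into a RepeatedFieldBuilderV3 the
      // first time a builder view is requested. After migration acls_ is set
      // to null and every accessor routes through aclsBuilder_, so the two
      // representations are never live at the same time.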

      private org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto logAggregationContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> logAggregationContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       * @return Whether the logAggregationContext field is set.
       */
      public boolean hasLogAggregationContext() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       * @return The logAggregationContext.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto getLogAggregationContext() {
        if (logAggregationContextBuilder_ == null) {
          return logAggregationContext_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
        } else {
          return logAggregationContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public Builder setLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
        if (logAggregationContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          logAggregationContext_ = value;
        } else {
          logAggregationContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public Builder setLogAggregationContext(
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder builderForValue) {
        if (logAggregationContextBuilder_ == null) {
          logAggregationContext_ = builderForValue.build();
        } else {
          logAggregationContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public Builder mergeLogAggregationContext(org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto value) {
        if (logAggregationContextBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            logAggregationContext_ != null &&
            logAggregationContext_ != org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance()) {
            getLogAggregationContextBuilder().mergeFrom(value);
          } else {
            logAggregationContext_ = value;
          }
        } else {
          logAggregationContextBuilder_.mergeFrom(value);
        }
        if (logAggregationContext_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public Builder clearLogAggregationContext() {
        bitField0_ = (bitField0_ & ~0x00000010);
        logAggregationContext_ = null;
        if (logAggregationContextBuilder_ != null) {
          logAggregationContextBuilder_.dispose();
          logAggregationContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder getLogAggregationContextBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getLogAggregationContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder getLogAggregationContextOrBuilder() {
        if (logAggregationContextBuilder_ != null) {
          return logAggregationContextBuilder_.getMessageOrBuilder();
        } else {
          return logAggregationContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.getDefaultInstance() : logAggregationContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.LogAggregationContextProto log_aggregation_context = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder> 
          getLogAggregationContextFieldBuilder() {
        if (logAggregationContextBuilder_ == null) {
          logAggregationContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LogAggregationContextProtoOrBuilder>(
                  getLogAggregationContext(),
                  getParentForChildren(),
                  isClean());
          logAggregationContext_ = null;
        }
        return logAggregationContextBuilder_;
      }
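      // As with the repeated field above, the singular
      // "log_aggregation_context" field is held as a plain message until a
      // builder view is requested, at which point ownership moves into a
      // SingleFieldBuilderV3 and the local field reference is nulled.
      // clearLogAggregationContext() disposes the nested builder so a later
      // set/merge starts from a clean state.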

      private long appLogAggregationInitedTime_ = -1L;
      /**
       * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
       * @return Whether the appLogAggregationInitedTime field is set.
       */
      @java.lang.Override
      public boolean hasAppLogAggregationInitedTime() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
       * @return The appLogAggregationInitedTime.
       */
      @java.lang.Override
      public long getAppLogAggregationInitedTime() {
        return appLogAggregationInitedTime_;
      }
      /**
       * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
       * @param value The appLogAggregationInitedTime to set.
       * @return This builder for chaining.
       */
      public Builder setAppLogAggregationInitedTime(long value) {
        appLogAggregationInitedTime_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 appLogAggregationInitedTime = 6 [default = -1];</code>
       * @return This builder for chaining.
       */
      public Builder clearAppLogAggregationInitedTime() {
        bitField0_ = (bitField0_ & ~0x00000020);
        appLogAggregationInitedTime_ = -1L;
        onChanged();
        return this;
      }
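      // Because the .proto declares [default = -1], an unset
      // appLogAggregationInitedTime reads back as -1 rather than 0. An
      // illustrative caller-side check (local name "builder" assumed):
      //
      //   long inited = builder.hasAppLogAggregationInitedTime()
      //       ? builder.getAppLogAggregationInitedTime()
      //       : -1L; // same value the getter would return when unset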

      private org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto flowContext_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder> flowContextBuilder_;
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       * @return Whether the flowContext field is set.
       */
      public boolean hasFlowContext() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       * @return The flowContext.
       */
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getFlowContext() {
        if (flowContextBuilder_ == null) {
          return flowContext_ == null ? org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance() : flowContext_;
        } else {
          return flowContextBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public Builder setFlowContext(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto value) {
        if (flowContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          flowContext_ = value;
        } else {
          flowContextBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public Builder setFlowContext(
          org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder builderForValue) {
        if (flowContextBuilder_ == null) {
          flowContext_ = builderForValue.build();
        } else {
          flowContextBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public Builder mergeFlowContext(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto value) {
        if (flowContextBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0) &&
            flowContext_ != null &&
            flowContext_ != org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance()) {
            getFlowContextBuilder().mergeFrom(value);
          } else {
            flowContext_ = value;
          }
        } else {
          flowContextBuilder_.mergeFrom(value);
        }
        if (flowContext_ != null) {
          bitField0_ |= 0x00000040;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public Builder clearFlowContext() {
        bitField0_ = (bitField0_ & ~0x00000040);
        flowContext_ = null;
        if (flowContextBuilder_ != null) {
          flowContextBuilder_.dispose();
          flowContextBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder getFlowContextBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getFlowContextFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder getFlowContextOrBuilder() {
        if (flowContextBuilder_ != null) {
          return flowContextBuilder_.getMessageOrBuilder();
        } else {
          return flowContext_ == null ?
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance() : flowContext_;
        }
      }
      /**
       * <code>optional .hadoop.yarn.FlowContextProto flowContext = 7;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder> 
          getFlowContextFieldBuilder() {
        if (flowContextBuilder_ == null) {
          flowContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder>(
                  getFlowContext(),
                  getParentForChildren(),
                  isClean());
          flowContext_ = null;
        }
        return flowContextBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ContainerManagerApplicationProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ContainerManagerApplicationProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ContainerManagerApplicationProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ContainerManagerApplicationProto>() {
      @java.lang.Override
      public ContainerManagerApplicationProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
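    // parsePartialFrom() above funnels every failure mode into an
    // InvalidProtocolBufferException that carries the partially populated
    // message via setUnfinishedMessage(), so callers can inspect whatever
    // fields were decoded before the error. Prefer parser() below over the
    // deprecated PARSER field when a Parser instance is needed.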

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ContainerManagerApplicationProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ContainerManagerApplicationProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
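  // Illustrative round trip for the message above (a sketch, not generated
  // output; it uses only methods defined by this class and its builder):
  //
  //   ContainerManagerApplicationProto proto =
  //       ContainerManagerApplicationProto.newBuilder()
  //           .setAppLogAggregationInitedTime(System.currentTimeMillis())
  //           .build();
  //   ContainerManagerApplicationProto copy =
  //       ContainerManagerApplicationProto.parser()
  //           .parseFrom(proto.toByteArray());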

  public interface DeletionServiceDeleteTaskProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.DeletionServiceDeleteTaskProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional int32 id = 1;</code>
     * @return Whether the id field is set.
     */
    boolean hasId();
    /**
     * <code>optional int32 id = 1;</code>
     * @return The id.
     */
    int getId();

    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    boolean hasUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * <code>optional string subdir = 3;</code>
     * @return Whether the subdir field is set.
     */
    boolean hasSubdir();
    /**
     * <code>optional string subdir = 3;</code>
     * @return The subdir.
     */
    java.lang.String getSubdir();
    /**
     * <code>optional string subdir = 3;</code>
     * @return The bytes for subdir.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubdirBytes();

    /**
     * <code>optional int64 deletionTime = 4;</code>
     * @return Whether the deletionTime field is set.
     */
    boolean hasDeletionTime();
    /**
     * <code>optional int64 deletionTime = 4;</code>
     * @return The deletionTime.
     */
    long getDeletionTime();

    /**
     * <code>repeated string basedirs = 5;</code>
     * @return A list containing the basedirs.
     */
    java.util.List<java.lang.String>
        getBasedirsList();
    /**
     * <code>repeated string basedirs = 5;</code>
     * @return The count of basedirs.
     */
    int getBasedirsCount();
    /**
     * <code>repeated string basedirs = 5;</code>
     * @param index The index of the element to return.
     * @return The basedirs at the given index.
     */
    java.lang.String getBasedirs(int index);
    /**
     * <code>repeated string basedirs = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the basedirs at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getBasedirsBytes(int index);

    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @return A list containing the successorIds.
     */
    java.util.List<java.lang.Integer> getSuccessorIdsList();
    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @return The count of successorIds.
     */
    int getSuccessorIdsCount();
    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @param index The index of the element to return.
     * @return The successorIds at the given index.
     */
    int getSuccessorIds(int index);

    /**
     * <code>optional string taskType = 7;</code>
     * @return Whether the taskType field is set.
     */
    boolean hasTaskType();
    /**
     * <code>optional string taskType = 7;</code>
     * @return The taskType.
     */
    java.lang.String getTaskType();
    /**
     * <code>optional string taskType = 7;</code>
     * @return The bytes for taskType.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTaskTypeBytes();

    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return Whether the dockerContainerId field is set.
     */
    boolean hasDockerContainerId();
    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return The dockerContainerId.
     */
    java.lang.String getDockerContainerId();
    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return The bytes for dockerContainerId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getDockerContainerIdBytes();
  }
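  // Illustrative use of the builder defined further below (a sketch; the
  // setters shown are generated for fields declared in this interface, and
  // the subdir value is only an example):
  //
  //   DeletionServiceDeleteTaskProto task =
  //       DeletionServiceDeleteTaskProto.newBuilder()
  //           .setId(42)
  //           .setUser("yarn")
  //           .setSubdir("appcache/application_0001")
  //           .setDeletionTime(System.currentTimeMillis())
  //           .build();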
  /**
   * Protobuf type {@code hadoop.yarn.DeletionServiceDeleteTaskProto}
   */
  public static final class DeletionServiceDeleteTaskProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.DeletionServiceDeleteTaskProto)
      DeletionServiceDeleteTaskProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use DeletionServiceDeleteTaskProto.newBuilder() to construct.
    private DeletionServiceDeleteTaskProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private DeletionServiceDeleteTaskProto() {
      user_ = "";
      subdir_ = "";
      basedirs_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      successorIds_ = emptyIntList();
      taskType_ = "";
      dockerContainerId_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new DeletionServiceDeleteTaskProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.Builder.class);
    }

    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private int id_ = 0;
    /**
     * <code>optional int32 id = 1;</code>
     * @return Whether the id field is set.
     */
    @java.lang.Override
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional int32 id = 1;</code>
     * @return The id.
     */
    @java.lang.Override
    public int getId() {
      return id_;
    }

    public static final int USER_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 2;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The user.
     */
    @java.lang.Override
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 2;</code>
     * @return The bytes for user.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }
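    // The two accessors above implement protobuf's lazy string handling:
    // user_ holds either a String or a ByteString, and each accessor caches
    // the converted form back into the field (the decoded String is cached
    // only when the bytes are valid UTF-8). The same pattern repeats for the
    // other string fields in this message.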

    public static final int SUBDIR_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object subdir_ = "";
    /**
     * <code>optional string subdir = 3;</code>
     * @return Whether the subdir field is set.
     */
    @java.lang.Override
    public boolean hasSubdir() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional string subdir = 3;</code>
     * @return The subdir.
     */
    @java.lang.Override
    public java.lang.String getSubdir() {
      java.lang.Object ref = subdir_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          subdir_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string subdir = 3;</code>
     * @return The bytes for subdir.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getSubdirBytes() {
      java.lang.Object ref = subdir_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        subdir_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DELETIONTIME_FIELD_NUMBER = 4;
    private long deletionTime_ = 0L;
    /**
     * <code>optional int64 deletionTime = 4;</code>
     * @return Whether the deletionTime field is set.
     */
    @java.lang.Override
    public boolean hasDeletionTime() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>optional int64 deletionTime = 4;</code>
     * @return The deletionTime.
     */
    @java.lang.Override
    public long getDeletionTime() {
      return deletionTime_;
    }

    public static final int BASEDIRS_FIELD_NUMBER = 5;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList basedirs_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string basedirs = 5;</code>
     * @return A list containing the basedirs.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getBasedirsList() {
      return basedirs_;
    }
    /**
     * <code>repeated string basedirs = 5;</code>
     * @return The count of basedirs.
     */
    public int getBasedirsCount() {
      return basedirs_.size();
    }
    /**
     * <code>repeated string basedirs = 5;</code>
     * @param index The index of the element to return.
     * @return The basedirs at the given index.
     */
    public java.lang.String getBasedirs(int index) {
      return basedirs_.get(index);
    }
    /**
     * <code>repeated string basedirs = 5;</code>
     * @param index The index of the value to return.
     * @return The bytes of the basedirs at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getBasedirsBytes(int index) {
      return basedirs_.getByteString(index);
    }

    public static final int SUCCESSORIDS_FIELD_NUMBER = 6;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.Internal.IntList successorIds_ =
        emptyIntList();
    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @return A list containing the successorIds.
     */
    @java.lang.Override
    public java.util.List<java.lang.Integer>
        getSuccessorIdsList() {
      return successorIds_;
    }
    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @return The count of successorIds.
     */
    public int getSuccessorIdsCount() {
      return successorIds_.size();
    }
    /**
     * <code>repeated int32 successorIds = 6;</code>
     * @param index The index of the element to return.
     * @return The successorIds at the given index.
     */
    public int getSuccessorIds(int index) {
      return successorIds_.getInt(index);
    }
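    // successorIds is stored as an Internal.IntList so element access via
    // getInt()/addInt() avoids Integer boxing, while getSuccessorIdsList()
    // still satisfies the List<Integer> contract of the OrBuilder interface.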

    public static final int TASKTYPE_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private volatile java.lang.Object taskType_ = "";
    /**
     * <code>optional string taskType = 7;</code>
     * @return Whether the taskType field is set.
     */
    @java.lang.Override
    public boolean hasTaskType() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>optional string taskType = 7;</code>
     * @return The taskType.
     */
    @java.lang.Override
    public java.lang.String getTaskType() {
      java.lang.Object ref = taskType_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          taskType_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string taskType = 7;</code>
     * @return The bytes for taskType.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTaskTypeBytes() {
      java.lang.Object ref = taskType_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        taskType_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DOCKERCONTAINERID_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private volatile java.lang.Object dockerContainerId_ = "";
    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return Whether the dockerContainerId field is set.
     */
    @java.lang.Override
    public boolean hasDockerContainerId() {
      return ((bitField0_ & 0x00000020) != 0);
    }
    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return The dockerContainerId.
     */
    @java.lang.Override
    public java.lang.String getDockerContainerId() {
      java.lang.Object ref = dockerContainerId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          dockerContainerId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string dockerContainerId = 8;</code>
     * @return The bytes for dockerContainerId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getDockerContainerIdBytes() {
      java.lang.Object ref = dockerContainerId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        dockerContainerId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeInt32(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, subdir_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeInt64(4, deletionTime_);
      }
      for (int i = 0; i < basedirs_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 5, basedirs_.getRaw(i));
      }
      for (int i = 0; i < successorIds_.size(); i++) {
        output.writeInt32(6, successorIds_.getInt(i));
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 7, taskType_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 8, dockerContainerId_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt32Size(1, id_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, user_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, subdir_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(4, deletionTime_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < basedirs_.size(); i++) {
          dataSize += computeStringSizeNoTag(basedirs_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getBasedirsList().size();
      }
      {
        int dataSize = 0;
        for (int i = 0; i < successorIds_.size(); i++) {
          dataSize += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
            .computeInt32SizeNoTag(successorIds_.getInt(i));
        }
        size += dataSize;
        size += 1 * getSuccessorIdsList().size();
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(7, taskType_);
      }
      if (((bitField0_ & 0x00000020) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(8, dockerContainerId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
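    // In the size computation above, "1 * list.size()" accounts for one tag
    // byte per element: basedirs (field 5) and successorIds (field 6) both
    // have field numbers below 16, so each tag fits in a single byte.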

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto other = (org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto) obj;

      if (hasId() != other.hasId()) return false;
      if (hasId()) {
        if (getId()
            != other.getId()) return false;
      }
      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasSubdir() != other.hasSubdir()) return false;
      if (hasSubdir()) {
        if (!getSubdir()
            .equals(other.getSubdir())) return false;
      }
      if (hasDeletionTime() != other.hasDeletionTime()) return false;
      if (hasDeletionTime()) {
        if (getDeletionTime()
            != other.getDeletionTime()) return false;
      }
      if (!getBasedirsList()
          .equals(other.getBasedirsList())) return false;
      if (!getSuccessorIdsList()
          .equals(other.getSuccessorIdsList())) return false;
      if (hasTaskType() != other.hasTaskType()) return false;
      if (hasTaskType()) {
        if (!getTaskType()
            .equals(other.getTaskType())) return false;
      }
      if (hasDockerContainerId() != other.hasDockerContainerId()) return false;
      if (hasDockerContainerId()) {
        if (!getDockerContainerId()
            .equals(other.getDockerContainerId())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasId()) {
        hash = (37 * hash) + ID_FIELD_NUMBER;
        hash = (53 * hash) + getId();
      }
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasSubdir()) {
        hash = (37 * hash) + SUBDIR_FIELD_NUMBER;
        hash = (53 * hash) + getSubdir().hashCode();
      }
      if (hasDeletionTime()) {
        hash = (37 * hash) + DELETIONTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getDeletionTime());
      }
      if (getBasedirsCount() > 0) {
        hash = (37 * hash) + BASEDIRS_FIELD_NUMBER;
        hash = (53 * hash) + getBasedirsList().hashCode();
      }
      if (getSuccessorIdsCount() > 0) {
        hash = (37 * hash) + SUCCESSORIDS_FIELD_NUMBER;
        hash = (53 * hash) + getSuccessorIdsList().hashCode();
      }
      if (hasTaskType()) {
        hash = (37 * hash) + TASKTYPE_FIELD_NUMBER;
        hash = (53 * hash) + getTaskType().hashCode();
      }
      if (hasDockerContainerId()) {
        hash = (37 * hash) + DOCKERCONTAINERID_FIELD_NUMBER;
        hash = (53 * hash) + getDockerContainerId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
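    // hashCode() is memoized: memoizedHashCode == 0 means "not yet computed",
    // so a message whose real hash happens to be 0 is simply recomputed on
    // each call.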

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.DeletionServiceDeleteTaskProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.DeletionServiceDeleteTaskProto)
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        id_ = 0;
        user_ = "";
        subdir_ = "";
        deletionTime_ = 0L;
        basedirs_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        successorIds_ = emptyIntList();
        taskType_ = "";
        dockerContainerId_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto build() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto result = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.id_ = id_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.user_ = user_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.subdir_ = subdir_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.deletionTime_ = deletionTime_;
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          basedirs_.makeImmutable();
          result.basedirs_ = basedirs_;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          successorIds_.makeImmutable();
          result.successorIds_ = successorIds_;
        }
        if (((from_bitField0_ & 0x00000040) != 0)) {
          result.taskType_ = taskType_;
          to_bitField0_ |= 0x00000010;
        }
        if (((from_bitField0_ & 0x00000080) != 0)) {
          result.dockerContainerId_ = dockerContainerId_;
          to_bitField0_ |= 0x00000020;
        }
        result.bitField0_ |= to_bitField0_;
      }
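      // Note the deliberate bit remapping above: in the builder, basedirs and
      // successorIds occupy bits 0x10 and 0x20 to track local mutability, but
      // repeated fields carry no presence bit in the message, so taskType and
      // dockerContainerId shift from builder bits 0x40/0x80 down to message
      // bits 0x10/0x20.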

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        if (other.hasUser()) {
          user_ = other.user_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasSubdir()) {
          subdir_ = other.subdir_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasDeletionTime()) {
          setDeletionTime(other.getDeletionTime());
        }
        if (!other.basedirs_.isEmpty()) {
          if (basedirs_.isEmpty()) {
            basedirs_ = other.basedirs_;
            bitField0_ |= 0x00000010;
          } else {
            ensureBasedirsIsMutable();
            basedirs_.addAll(other.basedirs_);
          }
          onChanged();
        }
        if (!other.successorIds_.isEmpty()) {
          if (successorIds_.isEmpty()) {
            successorIds_ = other.successorIds_;
            successorIds_.makeImmutable();
            bitField0_ |= 0x00000020;
          } else {
            ensureSuccessorIdsIsMutable();
            successorIds_.addAll(other.successorIds_);
          }
          onChanged();
        }
        if (other.hasTaskType()) {
          taskType_ = other.taskType_;
          bitField0_ |= 0x00000040;
          onChanged();
        }
        if (other.hasDockerContainerId()) {
          dockerContainerId_ = other.dockerContainerId_;
          bitField0_ |= 0x00000080;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }
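      // Merge semantics above follow standard proto2 rules: singular fields
      // from "other" overwrite local values when set, while the repeated
      // basedirs and successorIds fields are concatenated.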

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                id_ = input.readInt32();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
              case 18: {
                user_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 26: {
                subdir_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 32: {
                deletionTime_ = input.readInt64();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
              case 42: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureBasedirsIsMutable();
                basedirs_.add(bs);
                break;
              } // case 42
              case 48: {
                int v = input.readInt32();
                ensureSuccessorIdsIsMutable();
                successorIds_.addInt(v);
                break;
              } // case 48
              case 50: {
                int length = input.readRawVarint32();
                int limit = input.pushLimit(length);
                ensureSuccessorIdsIsMutable();
                while (input.getBytesUntilLimit() > 0) {
                  successorIds_.addInt(input.readInt32());
                }
                input.popLimit(limit);
                break;
              } // case 50
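              // Cases 48 and 50 together accept both encodings of the
              // repeated int32 field: tag 48 is one unpacked varint per
              // element, tag 50 is a packed length-delimited run. writeTo()
              // above only ever emits the unpacked form.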
              case 58: {
                taskType_ = input.readBytes();
                bitField0_ |= 0x00000040;
                break;
              } // case 58
              case 66: {
                dockerContainerId_ = input.readBytes();
                bitField0_ |= 0x00000080;
                break;
              } // case 66
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private int id_;
      /**
       * <code>optional int32 id = 1;</code>
       * @return Whether the id field is set.
       */
      @java.lang.Override
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @return The id.
       */
      @java.lang.Override
      public int getId() {
        return id_;
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @param value The id to set.
       * @return This builder for chaining.
       */
      public Builder setId(int value) {
        id_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        onChanged();
        return this;
      }

      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 2;</code>
       * @return Whether the user field is set.
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The user.
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @return The bytes for user.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The user to set.
       * @return This builder for chaining.
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearUser() {
        user_ = getDefaultInstance().getUser();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 2;</code>
       * @param value The bytes for user to set.
       * @return This builder for chaining.
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private java.lang.Object subdir_ = "";
      /**
       * <code>optional string subdir = 3;</code>
       * @return Whether the subdir field is set.
       */
      public boolean hasSubdir() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string subdir = 3;</code>
       * @return The subdir.
       */
      public java.lang.String getSubdir() {
        java.lang.Object ref = subdir_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            subdir_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string subdir = 3;</code>
       * @return The bytes for subdir.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getSubdirBytes() {
        java.lang.Object ref = subdir_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          subdir_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string subdir = 3;</code>
       * @param value The subdir to set.
       * @return This builder for chaining.
       */
      public Builder setSubdir(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        subdir_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string subdir = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearSubdir() {
        subdir_ = getDefaultInstance().getSubdir();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string subdir = 3;</code>
       * @param value The bytes for subdir to set.
       * @return This builder for chaining.
       */
      public Builder setSubdirBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        subdir_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private long deletionTime_ ;
      /**
       * <code>optional int64 deletionTime = 4;</code>
       * @return Whether the deletionTime field is set.
       */
      @java.lang.Override
      public boolean hasDeletionTime() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>optional int64 deletionTime = 4;</code>
       * @return The deletionTime.
       */
      @java.lang.Override
      public long getDeletionTime() {
        return deletionTime_;
      }
      /**
       * <code>optional int64 deletionTime = 4;</code>
       * @param value The deletionTime to set.
       * @return This builder for chaining.
       */
      public Builder setDeletionTime(long value) {
        deletionTime_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 deletionTime = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearDeletionTime() {
        bitField0_ = (bitField0_ & ~0x00000008);
        deletionTime_ = 0L;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList basedirs_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
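      // Copy-on-write guard: the shared empty LazyStringArrayList (and any list
      // exposed through getBasedirsList(), which is made immutable on return) is
      // never mutated in place; the first write swaps in a fresh mutable copy and
      // marks the field as set in bitField0_.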
      private void ensureBasedirsIsMutable() {
        if (!basedirs_.isModifiable()) {
          basedirs_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(basedirs_);
        }
        bitField0_ |= 0x00000010;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @return A list containing the basedirs.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getBasedirsList() {
        basedirs_.makeImmutable();
        return basedirs_;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @return The count of basedirs.
       */
      public int getBasedirsCount() {
        return basedirs_.size();
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param index The index of the element to return.
       * @return The basedirs at the given index.
       */
      public java.lang.String getBasedirs(int index) {
        return basedirs_.get(index);
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param index The index of the value to return.
       * @return The bytes of the basedirs at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getBasedirsBytes(int index) {
        return basedirs_.getByteString(index);
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param index The index to set the value at.
       * @param value The basedirs to set.
       * @return This builder for chaining.
       */
      public Builder setBasedirs(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBasedirsIsMutable();
        basedirs_.set(index, value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param value The basedirs to add.
       * @return This builder for chaining.
       */
      public Builder addBasedirs(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBasedirsIsMutable();
        basedirs_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param values The basedirs to add.
       * @return This builder for chaining.
       */
      public Builder addAllBasedirs(
          java.lang.Iterable<java.lang.String> values) {
        ensureBasedirsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, basedirs_);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @return This builder for chaining.
       */
      public Builder clearBasedirs() {
        basedirs_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000010);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string basedirs = 5;</code>
       * @param value The bytes of the basedirs to add.
       * @return This builder for chaining.
       */
      public Builder addBasedirsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureBasedirsIsMutable();
        basedirs_.add(value);
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.Internal.IntList successorIds_ = emptyIntList();
      private void ensureSuccessorIdsIsMutable() {
        if (!successorIds_.isModifiable()) {
          successorIds_ = makeMutableCopy(successorIds_);
        }
        bitField0_ |= 0x00000020;
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @return A list containing the successorIds.
       */
      public java.util.List<java.lang.Integer>
          getSuccessorIdsList() {
        successorIds_.makeImmutable();
        return successorIds_;
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @return The count of successorIds.
       */
      public int getSuccessorIdsCount() {
        return successorIds_.size();
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @param index The index of the element to return.
       * @return The successorIds at the given index.
       */
      public int getSuccessorIds(int index) {
        return successorIds_.getInt(index);
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @param index The index to set the value at.
       * @param value The successorIds to set.
       * @return This builder for chaining.
       */
      public Builder setSuccessorIds(
          int index, int value) {
        ensureSuccessorIdsIsMutable();
        successorIds_.setInt(index, value);
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @param value The successorIds to add.
       * @return This builder for chaining.
       */
      public Builder addSuccessorIds(int value) {
        ensureSuccessorIdsIsMutable();
        successorIds_.addInt(value);
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @param values The successorIds to add.
       * @return This builder for chaining.
       */
      public Builder addAllSuccessorIds(
          java.lang.Iterable<? extends java.lang.Integer> values) {
        ensureSuccessorIdsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, successorIds_);
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>repeated int32 successorIds = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearSuccessorIds() {
        successorIds_ = emptyIntList();
        bitField0_ = (bitField0_ & ~0x00000020);
        onChanged();
        return this;
      }

      private java.lang.Object taskType_ = "";
      /**
       * <code>optional string taskType = 7;</code>
       * @return Whether the taskType field is set.
       */
      public boolean hasTaskType() {
        return ((bitField0_ & 0x00000040) != 0);
      }
      /**
       * <code>optional string taskType = 7;</code>
       * @return The taskType.
       */
      public java.lang.String getTaskType() {
        java.lang.Object ref = taskType_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            taskType_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string taskType = 7;</code>
       * @return The bytes for taskType.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTaskTypeBytes() {
        java.lang.Object ref = taskType_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          taskType_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string taskType = 7;</code>
       * @param value The taskType to set.
       * @return This builder for chaining.
       */
      public Builder setTaskType(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        taskType_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }
      /**
       * <code>optional string taskType = 7;</code>
       * @return This builder for chaining.
       */
      public Builder clearTaskType() {
        taskType_ = getDefaultInstance().getTaskType();
        bitField0_ = (bitField0_ & ~0x00000040);
        onChanged();
        return this;
      }
      /**
       * <code>optional string taskType = 7;</code>
       * @param value The bytes for taskType to set.
       * @return This builder for chaining.
       */
      public Builder setTaskTypeBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        taskType_ = value;
        bitField0_ |= 0x00000040;
        onChanged();
        return this;
      }

      private java.lang.Object dockerContainerId_ = "";
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @return Whether the dockerContainerId field is set.
       */
      public boolean hasDockerContainerId() {
        return ((bitField0_ & 0x00000080) != 0);
      }
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @return The dockerContainerId.
       */
      public java.lang.String getDockerContainerId() {
        java.lang.Object ref = dockerContainerId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            dockerContainerId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @return The bytes for dockerContainerId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getDockerContainerIdBytes() {
        java.lang.Object ref = dockerContainerId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          dockerContainerId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @param value The dockerContainerId to set.
       * @return This builder for chaining.
       */
      public Builder setDockerContainerId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        dockerContainerId_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @return This builder for chaining.
       */
      public Builder clearDockerContainerId() {
        dockerContainerId_ = getDefaultInstance().getDockerContainerId();
        bitField0_ = (bitField0_ & ~0x00000080);
        onChanged();
        return this;
      }
      /**
       * <code>optional string dockerContainerId = 8;</code>
       * @param value The bytes for dockerContainerId to set.
       * @return This builder for chaining.
       */
      public Builder setDockerContainerIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        dockerContainerId_ = value;
        bitField0_ |= 0x00000080;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.DeletionServiceDeleteTaskProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.DeletionServiceDeleteTaskProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }
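    // The parser below funnels every stream-level failure into an
    // InvalidProtocolBufferException and attaches whatever was decoded so far
    // (builder.buildPartial()) as the unfinished message, letting callers
    // inspect partial state after a parse error.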

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<DeletionServiceDeleteTaskProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<DeletionServiceDeleteTaskProto>() {
      @java.lang.Override
      public DeletionServiceDeleteTaskProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<DeletionServiceDeleteTaskProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<DeletionServiceDeleteTaskProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
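
  // Illustrative only (not part of the generated API surface beyond the calls it
  // uses): a minimal sketch of round-tripping a DeletionServiceDeleteTaskProto
  // through its Builder and a byte[]. All field values here are hypothetical.
  //
  //   DeletionServiceDeleteTaskProto task = DeletionServiceDeleteTaskProto.newBuilder()
  //       .setId(42)
  //       .setUser("yarn")
  //       .setSubdir("app_0001")
  //       .setDeletionTime(System.currentTimeMillis())
  //       .addBasedirs("/tmp/nm-local-dir")
  //       .addSuccessorIds(43)
  //       .build();
  //   byte[] bytes = task.toByteArray();
  //   DeletionServiceDeleteTaskProto restored =
  //       DeletionServiceDeleteTaskProto.parseFrom(bytes);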

  public interface LocalizedResourceProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LocalizedResourceProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    boolean hasResource();
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     * @return The resource.
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getResource();
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getResourceOrBuilder();

    /**
     * <code>optional string localPath = 2;</code>
     * @return Whether the localPath field is set.
     */
    boolean hasLocalPath();
    /**
     * <code>optional string localPath = 2;</code>
     * @return The localPath.
     */
    java.lang.String getLocalPath();
    /**
     * <code>optional string localPath = 2;</code>
     * @return The bytes for localPath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getLocalPathBytes();

    /**
     * <code>optional int64 size = 3;</code>
     * @return Whether the size field is set.
     */
    boolean hasSize();
    /**
     * <code>optional int64 size = 3;</code>
     * @return The size.
     */
    long getSize();
  }
  /**
   * Protobuf type {@code hadoop.yarn.LocalizedResourceProto}
   */
  public static final class LocalizedResourceProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LocalizedResourceProto)
      LocalizedResourceProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LocalizedResourceProto.newBuilder() to construct.
    private LocalizedResourceProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LocalizedResourceProto() {
      localPath_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LocalizedResourceProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LocalizedResourceProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LocalizedResourceProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.Builder.class);
    }

    private int bitField0_;
    public static final int RESOURCE_FIELD_NUMBER = 1;
    private org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto resource_;
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     * @return Whether the resource field is set.
     */
    @java.lang.Override
    public boolean hasResource() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     * @return The resource.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getResource() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : resource_;
    }
    /**
     * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getResourceOrBuilder() {
      return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : resource_;
    }

    public static final int LOCALPATH_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object localPath_ = "";
    /**
     * <code>optional string localPath = 2;</code>
     * @return Whether the localPath field is set.
     */
    @java.lang.Override
    public boolean hasLocalPath() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string localPath = 2;</code>
     * @return The localPath.
     */
    @java.lang.Override
    public java.lang.String getLocalPath() {
      java.lang.Object ref = localPath_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          localPath_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string localPath = 2;</code>
     * @return The bytes for localPath.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getLocalPathBytes() {
      java.lang.Object ref = localPath_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        localPath_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int SIZE_FIELD_NUMBER = 3;
    private long size_ = 0L;
    /**
     * <code>optional int64 size = 3;</code>
     * @return Whether the size field is set.
     */
    @java.lang.Override
    public boolean hasSize() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 size = 3;</code>
     * @return The size.
     */
    @java.lang.Override
    public long getSize() {
      return size_;
    }

    private byte memoizedIsInitialized = -1;
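    // Tri-state memo: -1 = not yet computed, 0 = known uninitialized,
    // 1 = known initialized. This message declares no required fields, so
    // isInitialized() below resolves to true on first evaluation.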
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeMessage(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, localPath_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, size_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(1, getResource());
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, localPath_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, size_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto other = (org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto) obj;

      if (hasResource() != other.hasResource()) return false;
      if (hasResource()) {
        if (!getResource()
            .equals(other.getResource())) return false;
      }
      if (hasLocalPath() != other.hasLocalPath()) return false;
      if (hasLocalPath()) {
        if (!getLocalPath()
            .equals(other.getLocalPath())) return false;
      }
      if (hasSize() != other.hasSize()) return false;
      if (hasSize()) {
        if (getSize()
            != other.getSize()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }
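    // Note that equals() requires presence parity: an unset optional field is
    // never equal to a set one, even when the set value matches the default.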

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasResource()) {
        hash = (37 * hash) + RESOURCE_FIELD_NUMBER;
        hash = (53 * hash) + getResource().hashCode();
      }
      if (hasLocalPath()) {
        hash = (37 * hash) + LOCALPATH_FIELD_NUMBER;
        hash = (53 * hash) + getLocalPath().hashCode();
      }
      if (hasSize()) {
        hash = (37 * hash) + SIZE_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getSize());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }
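    // toBuilder() special-cases the default instance: building from the default
    // skips the mergeFrom copy entirely, since there is no field state to carry.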

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LocalizedResourceProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LocalizedResourceProto)
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LocalizedResourceProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LocalizedResourceProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getResourceFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        localPath_ = "";
        size_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LocalizedResourceProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto build() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto result = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.resource_ = resourceBuilder_ == null
              ? resource_
              : resourceBuilder_.build();
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.localPath_ = localPath_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.size_ = size_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto.getDefaultInstance()) return this;
        if (other.hasResource()) {
          mergeResource(other.getResource());
        }
        if (other.hasLocalPath()) {
          localPath_ = other.localPath_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasSize()) {
          setSize(other.getSize());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                input.readMessage(
                    getResourceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                localPath_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                size_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto resource_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> resourceBuilder_;
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       * @return Whether the resource field is set.
       */
      public boolean hasResource() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       * @return The resource.
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto getResource() {
        if (resourceBuilder_ == null) {
          return resource_ == null ? org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : resource_;
        } else {
          return resourceBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public Builder setResource(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) {
        if (resourceBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          resource_ = value;
        } else {
          resourceBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public Builder setResource(
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder builderForValue) {
        if (resourceBuilder_ == null) {
          resource_ = builderForValue.build();
        } else {
          resourceBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public Builder mergeResource(org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto value) {
        if (resourceBuilder_ == null) {
          if (((bitField0_ & 0x00000001) != 0) &&
            resource_ != null &&
            resource_ != org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance()) {
            getResourceBuilder().mergeFrom(value);
          } else {
            resource_ = value;
          }
        } else {
          resourceBuilder_.mergeFrom(value);
        }
        if (resource_ != null) {
          bitField0_ |= 0x00000001;
          onChanged();
        }
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public Builder clearResource() {
        bitField0_ = (bitField0_ & ~0x00000001);
        resource_ = null;
        if (resourceBuilder_ != null) {
          resourceBuilder_.dispose();
          resourceBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder getResourceBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getResourceFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder getResourceOrBuilder() {
        if (resourceBuilder_ != null) {
          return resourceBuilder_.getMessageOrBuilder();
        } else {
          return resource_ == null ?
              org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.getDefaultInstance() : resource_;
        }
      }
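      // The SingleFieldBuilderV3 below is created lazily on first use; once it
      // exists it owns the sub-message state, so the plain resource_ field is
      // nulled out and all reads and writes are routed through the builder.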
      /**
       * <code>optional .hadoop.yarn.LocalResourceProto resource = 1;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder> 
          getResourceFieldBuilder() {
        if (resourceBuilder_ == null) {
          resourceBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProtoOrBuilder>(
                  getResource(),
                  getParentForChildren(),
                  isClean());
          resource_ = null;
        }
        return resourceBuilder_;
      }

      private java.lang.Object localPath_ = "";
      /**
       * <code>optional string localPath = 2;</code>
       * @return Whether the localPath field is set.
       */
      public boolean hasLocalPath() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string localPath = 2;</code>
       * @return The localPath.
       */
      public java.lang.String getLocalPath() {
        java.lang.Object ref = localPath_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            localPath_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string localPath = 2;</code>
       * @return The bytes for localPath.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getLocalPathBytes() {
        java.lang.Object ref = localPath_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          localPath_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string localPath = 2;</code>
       * @param value The localPath to set.
       * @return This builder for chaining.
       */
      public Builder setLocalPath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        localPath_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string localPath = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearLocalPath() {
        localPath_ = getDefaultInstance().getLocalPath();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string localPath = 2;</code>
       * @param value The bytes for localPath to set.
       * @return This builder for chaining.
       */
      public Builder setLocalPathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        localPath_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private long size_ ;
      /**
       * <code>optional int64 size = 3;</code>
       * @return Whether the size field is set.
       */
      @java.lang.Override
      public boolean hasSize() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 size = 3;</code>
       * @return The size.
       */
      @java.lang.Override
      public long getSize() {
        return size_;
      }
      /**
       * <code>optional int64 size = 3;</code>
       * @param value The size to set.
       * @return This builder for chaining.
       */
      public Builder setSize(long value) {
        size_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 size = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearSize() {
        bitField0_ = (bitField0_ & ~0x00000004);
        size_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LocalizedResourceProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LocalizedResourceProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LocalizedResourceProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LocalizedResourceProto>() {
      @java.lang.Override
      public LocalizedResourceProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LocalizedResourceProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LocalizedResourceProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
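
  // Illustrative only (not part of the generated code): a minimal sketch of
  // recovering a LocalizedResourceProto from previously stored bytes, e.g. a
  // value read back from the NM recovery state store. "stored" and the helper
  // that produces it are hypothetical.
  //
  //   byte[] stored = readValueFromStateStore(); // hypothetical helper
  //   LocalizedResourceProto rsrc = LocalizedResourceProto.parseFrom(stored);
  //   if (rsrc.hasLocalPath() && rsrc.hasSize()) {
  //     System.out.println(rsrc.getLocalPath() + " (" + rsrc.getSize() + " bytes)");
  //   }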

  public interface LogDeleterProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.LogDeleterProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string user = 1;</code>
     * @return Whether the user field is set.
     */
    boolean hasUser();
    /**
     * <code>optional string user = 1;</code>
     * @return The user.
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 1;</code>
     * @return The bytes for user.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes();

    /**
     * <code>optional int64 deletionTime = 2;</code>
     * @return Whether the deletionTime field is set.
     */
    boolean hasDeletionTime();
    /**
     * <code>optional int64 deletionTime = 2;</code>
     * @return The deletionTime.
     */
    long getDeletionTime();
  }
  /**
   * Protobuf type {@code hadoop.yarn.LogDeleterProto}
   */
  public static final class LogDeleterProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.LogDeleterProto)
      LogDeleterProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use LogDeleterProto.newBuilder() to construct.
    private LogDeleterProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private LogDeleterProto() {
      user_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new LogDeleterProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LogDeleterProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LogDeleterProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.Builder.class);
    }

    private int bitField0_;
    public static final int USER_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object user_ = "";
    /**
     * <code>optional string user = 1;</code>
     * @return Whether the user field is set.
     */
    @java.lang.Override
    public boolean hasUser() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string user = 1;</code>
     * @return The user.
     */
    @java.lang.Override
    public java.lang.String getUser() {
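      // user_ may hold either the decoded String or the raw ByteString read
      // off the wire; decode lazily and cache the String only when the bytes
      // are valid UTF-8.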
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 1;</code>
     * @return The bytes for user.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int DELETIONTIME_FIELD_NUMBER = 2;
    private long deletionTime_ = 0L;
    /**
     * <code>optional int64 deletionTime = 2;</code>
     * @return Whether the deletionTime field is set.
     */
    @java.lang.Override
    public boolean hasDeletionTime() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional int64 deletionTime = 2;</code>
     * @return The deletionTime.
     */
    @java.lang.Override
    public long getDeletionTime() {
      return deletionTime_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, user_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeInt64(2, deletionTime_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, user_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(2, deletionTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto other = (org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto) obj;

      if (hasUser() != other.hasUser()) return false;
      if (hasUser()) {
        if (!getUser()
            .equals(other.getUser())) return false;
      }
      if (hasDeletionTime() != other.hasDeletionTime()) return false;
      if (hasDeletionTime()) {
        if (getDeletionTime()
            != other.getDeletionTime()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasDeletionTime()) {
        hash = (37 * hash) + DELETIONTIME_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getDeletionTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
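
    // Note (editorial): the delimited variants above frame each message with a
    // varint length prefix, so several LogDeleterProto records can be written
    // back-to-back on one stream and read back one at a time.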
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.LogDeleterProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.LogDeleterProto)
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LogDeleterProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LogDeleterProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        user_ = "";
        deletionTime_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_LogDeleterProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto build() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto result = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.user_ = user_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.deletionTime_ = deletionTime_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto.getDefaultInstance()) return this;
        if (other.hasUser()) {
          user_ = other.user_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasDeletionTime()) {
          setDeletionTime(other.getDeletionTime());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
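            // Each tag is (field_number << 3) | wire_type: 10 is field 1 as a
            // length-delimited value, 16 is field 2 as a varint, and 0 marks
            // the end of the input.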
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                user_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 16: {
                deletionTime_ = input.readInt64();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 1;</code>
       * @return Whether the user field is set.
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string user = 1;</code>
       * @return The user.
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            user_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 1;</code>
       * @return The bytes for user.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 1;</code>
       * @param value The user to set.
       * @return This builder for chaining.
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearUser() {
        user_ = getDefaultInstance().getUser();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 1;</code>
       * @param value The bytes for user to set.
       * @return This builder for chaining.
       */
      public Builder setUserBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        user_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private long deletionTime_ ;
      /**
       * <code>optional int64 deletionTime = 2;</code>
       * @return Whether the deletionTime field is set.
       */
      @java.lang.Override
      public boolean hasDeletionTime() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional int64 deletionTime = 2;</code>
       * @return The deletionTime.
       */
      @java.lang.Override
      public long getDeletionTime() {
        return deletionTime_;
      }
      /**
       * <code>optional int64 deletionTime = 2;</code>
       * @param value The deletionTime to set.
       * @return This builder for chaining.
       */
      public Builder setDeletionTime(long value) {

        deletionTime_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 deletionTime = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearDeletionTime() {
        bitField0_ = (bitField0_ & ~0x00000002);
        deletionTime_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.LogDeleterProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.LogDeleterProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<LogDeleterProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<LogDeleterProto>() {
      @java.lang.Override
      public LogDeleterProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<LogDeleterProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<LogDeleterProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
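
  // Illustrative sketch (editorial comment, not protoc output): round-tripping
  // a LogDeleterProto through its builder and byte serialization. The user
  // name and timestamp below are hypothetical.
  //
  //   LogDeleterProto proto = LogDeleterProto.newBuilder()
  //       .setUser("mapred")
  //       .setDeletionTime(System.currentTimeMillis())
  //       .build();
  //   LogDeleterProto parsed = LogDeleterProto.parseFrom(proto.toByteArray());
  //   assert parsed.hasUser() && parsed.getUser().equals("mapred");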

  public interface FlowContextProtoOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.FlowContextProto)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>optional string flowName = 1;</code>
     * @return Whether the flowName field is set.
     */
    boolean hasFlowName();
    /**
     * <code>optional string flowName = 1;</code>
     * @return The flowName.
     */
    java.lang.String getFlowName();
    /**
     * <code>optional string flowName = 1;</code>
     * @return The bytes for flowName.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getFlowNameBytes();

    /**
     * <code>optional string flowVersion = 2;</code>
     * @return Whether the flowVersion field is set.
     */
    boolean hasFlowVersion();
    /**
     * <code>optional string flowVersion = 2;</code>
     * @return The flowVersion.
     */
    java.lang.String getFlowVersion();
    /**
     * <code>optional string flowVersion = 2;</code>
     * @return The bytes for flowVersion.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getFlowVersionBytes();

    /**
     * <code>optional int64 flowRunId = 3;</code>
     * @return Whether the flowRunId field is set.
     */
    boolean hasFlowRunId();
    /**
     * <code>optional int64 flowRunId = 3;</code>
     * @return The flowRunId.
     */
    long getFlowRunId();
  }
  /**
   * Protobuf type {@code hadoop.yarn.FlowContextProto}
   */
  public static final class FlowContextProto extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.FlowContextProto)
      FlowContextProtoOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use FlowContextProto.newBuilder() to construct.
    private FlowContextProto(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private FlowContextProto() {
      flowName_ = "";
      flowVersion_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new FlowContextProto();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_FlowContextProto_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_FlowContextProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder.class);
    }

    private int bitField0_;
    public static final int FLOWNAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object flowName_ = "";
    /**
     * <code>optional string flowName = 1;</code>
     * @return Whether the flowName field is set.
     */
    @java.lang.Override
    public boolean hasFlowName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>optional string flowName = 1;</code>
     * @return The flowName.
     */
    @java.lang.Override
    public java.lang.String getFlowName() {
      java.lang.Object ref = flowName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          flowName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string flowName = 1;</code>
     * @return The bytes for flowName.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getFlowNameBytes() {
      java.lang.Object ref = flowName_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        flowName_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FLOWVERSION_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object flowVersion_ = "";
    /**
     * <code>optional string flowVersion = 2;</code>
     * @return Whether the flowVersion field is set.
     */
    @java.lang.Override
    public boolean hasFlowVersion() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string flowVersion = 2;</code>
     * @return The flowVersion.
     */
    @java.lang.Override
    public java.lang.String getFlowVersion() {
      java.lang.Object ref = flowVersion_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          flowVersion_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string flowVersion = 2;</code>
     * @return The bytes for flowVersion.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getFlowVersionBytes() {
      java.lang.Object ref = flowVersion_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        flowVersion_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int FLOWRUNID_FIELD_NUMBER = 3;
    private long flowRunId_ = 0L;
    /**
     * <code>optional int64 flowRunId = 3;</code>
     * @return Whether the flowRunId field is set.
     */
    @java.lang.Override
    public boolean hasFlowRunId() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>optional int64 flowRunId = 3;</code>
     * @return The flowRunId.
     */
    @java.lang.Override
    public long getFlowRunId() {
      return flowRunId_;
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, flowName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, flowVersion_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        output.writeInt64(3, flowRunId_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, flowName_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, flowVersion_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeInt64Size(3, flowRunId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto other = (org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto) obj;

      if (hasFlowName() != other.hasFlowName()) return false;
      if (hasFlowName()) {
        if (!getFlowName()
            .equals(other.getFlowName())) return false;
      }
      if (hasFlowVersion() != other.hasFlowVersion()) return false;
      if (hasFlowVersion()) {
        if (!getFlowVersion()
            .equals(other.getFlowVersion())) return false;
      }
      if (hasFlowRunId() != other.hasFlowRunId()) return false;
      if (hasFlowRunId()) {
        if (getFlowRunId()
            != other.getFlowRunId()) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasFlowName()) {
        hash = (37 * hash) + FLOWNAME_FIELD_NUMBER;
        hash = (53 * hash) + getFlowName().hashCode();
      }
      if (hasFlowVersion()) {
        hash = (37 * hash) + FLOWVERSION_FIELD_NUMBER;
        hash = (53 * hash) + getFlowVersion().hashCode();
      }
      if (hasFlowRunId()) {
        hash = (37 * hash) + FLOWRUNID_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashLong(
            getFlowRunId());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.FlowContextProto}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.FlowContextProto)
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProtoOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_FlowContextProto_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_FlowContextProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.class, org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        flowName_ = "";
        flowVersion_ = "";
        flowRunId_ = 0L;
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.internal_static_hadoop_yarn_FlowContextProto_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto build() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto buildPartial() {
        org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto result = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.flowName_ = flowName_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.flowVersion_ = flowVersion_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.flowRunId_ = flowRunId_;
          to_bitField0_ |= 0x00000004;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto) {
          return mergeFrom((org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto other) {
        if (other == org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto.getDefaultInstance()) return this;
        if (other.hasFlowName()) {
          flowName_ = other.flowName_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasFlowVersion()) {
          flowVersion_ = other.flowVersion_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        if (other.hasFlowRunId()) {
          setFlowRunId(other.getFlowRunId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                flowName_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                flowVersion_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              case 24: {
                flowRunId_ = input.readInt64();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object flowName_ = "";
      /**
       * <code>optional string flowName = 1;</code>
       * @return Whether the flowName field is set.
       */
      public boolean hasFlowName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>optional string flowName = 1;</code>
       * @return The flowName.
       */
      public java.lang.String getFlowName() {
        java.lang.Object ref = flowName_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            flowName_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string flowName = 1;</code>
       * @return The bytes for flowName.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getFlowNameBytes() {
        java.lang.Object ref = flowName_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          flowName_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string flowName = 1;</code>
       * @param value The flowName to set.
       * @return This builder for chaining.
       */
      public Builder setFlowName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        flowName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>optional string flowName = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearFlowName() {
        flowName_ = getDefaultInstance().getFlowName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>optional string flowName = 1;</code>
       * @param value The bytes for flowName to set.
       * @return This builder for chaining.
       */
      public Builder setFlowNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        flowName_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object flowVersion_ = "";
      /**
       * <code>optional string flowVersion = 2;</code>
       * @return Whether the flowVersion field is set.
       */
      public boolean hasFlowVersion() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>optional string flowVersion = 2;</code>
       * @return The flowVersion.
       */
      public java.lang.String getFlowVersion() {
        java.lang.Object ref = flowVersion_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            flowVersion_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string flowVersion = 2;</code>
       * @return The bytes for flowVersion.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getFlowVersionBytes() {
        java.lang.Object ref = flowVersion_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          flowVersion_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string flowVersion = 2;</code>
       * @param value The flowVersion to set.
       * @return This builder for chaining.
       */
      public Builder setFlowVersion(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        flowVersion_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>optional string flowVersion = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearFlowVersion() {
        flowVersion_ = getDefaultInstance().getFlowVersion();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>optional string flowVersion = 2;</code>
       * @param value The bytes for flowVersion to set.
       * @return This builder for chaining.
       */
      public Builder setFlowVersionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        flowVersion_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }

      private long flowRunId_ ;
      /**
       * <code>optional int64 flowRunId = 3;</code>
       * @return Whether the flowRunId field is set.
       */
      @java.lang.Override
      public boolean hasFlowRunId() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional int64 flowRunId = 3;</code>
       * @return The flowRunId.
       */
      @java.lang.Override
      public long getFlowRunId() {
        return flowRunId_;
      }
      /**
       * <code>optional int64 flowRunId = 3;</code>
       * @param value The flowRunId to set.
       * @return This builder for chaining.
       */
      public Builder setFlowRunId(long value) {

        flowRunId_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 flowRunId = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearFlowRunId() {
        bitField0_ = (bitField0_ & ~0x00000004);
        flowRunId_ = 0L;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.FlowContextProto)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.FlowContextProto)
    private static final org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto();
    }

    public static org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<FlowContextProto>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<FlowContextProto>() {
      @java.lang.Override
      public FlowContextProto parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<FlowContextProto> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<FlowContextProto> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.FlowContextProto getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ContainerManagerApplicationProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LocalizedResourceProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LocalizedResourceProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_LogDeleterProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_LogDeleterProto_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_FlowContextProto_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_FlowContextProto_fieldAccessorTable;

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n&yarn_server_nodemanager_recovery.proto" +
      "\022\013hadoop.yarn\032\021yarn_protos.proto\"\314\002\n Con" +
      "tainerManagerApplicationProto\022+\n\002id\030\001 \001(" +
      "\0132\037.hadoop.yarn.ApplicationIdProto\022\014\n\004us" +
      "er\030\002 \001(\t\022\023\n\013credentials\030\003 \001(\014\0221\n\004acls\030\004 " +
      "\003(\0132#.hadoop.yarn.ApplicationACLMapProto" +
      "\022H\n\027log_aggregation_context\030\005 \001(\0132\'.hado" +
      "op.yarn.LogAggregationContextProto\022\'\n\033ap" +
      "pLogAggregationInitedTime\030\006 \001(\003:\002-1\0222\n\013f" +
      "lowContext\030\007 \001(\0132\035.hadoop.yarn.FlowConte" +
      "xtProto\"\265\001\n\036DeletionServiceDeleteTaskPro" +
      "to\022\n\n\002id\030\001 \001(\005\022\014\n\004user\030\002 \001(\t\022\016\n\006subdir\030\003" +
      " \001(\t\022\024\n\014deletionTime\030\004 \001(\003\022\020\n\010basedirs\030\005" +
      " \003(\t\022\024\n\014successorIds\030\006 \003(\005\022\020\n\010taskType\030\007" +
      " \001(\t\022\031\n\021dockerContainerId\030\010 \001(\t\"l\n\026Local" +
      "izedResourceProto\0221\n\010resource\030\001 \001(\0132\037.ha" +
      "doop.yarn.LocalResourceProto\022\021\n\tlocalPat" +
      "h\030\002 \001(\t\022\014\n\004size\030\003 \001(\003\"5\n\017LogDeleterProto" +
      "\022\014\n\004user\030\001 \001(\t\022\024\n\014deletionTime\030\002 \001(\003\"L\n\020" +
      "FlowContextProto\022\020\n\010flowName\030\001 \001(\t\022\023\n\013fl" +
      "owVersion\030\002 \001(\t\022\021\n\tflowRunId\030\003 \001(\003BI\n\034or" +
      "g.apache.hadoop.yarn.protoB#YarnServerNo" +
      "demanagerRecoveryProtos\210\001\001\240\001\001"
    };
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        });
    internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_ContainerManagerApplicationProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ContainerManagerApplicationProto_descriptor,
        new java.lang.String[] { "Id", "User", "Credentials", "Acls", "LogAggregationContext", "AppLogAggregationInitedTime", "FlowContext", });
    internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_DeletionServiceDeleteTaskProto_descriptor,
        new java.lang.String[] { "Id", "User", "Subdir", "DeletionTime", "Basedirs", "SuccessorIds", "TaskType", "DockerContainerId", });
    internal_static_hadoop_yarn_LocalizedResourceProto_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_LocalizedResourceProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LocalizedResourceProto_descriptor,
        new java.lang.String[] { "Resource", "LocalPath", "Size", });
    internal_static_hadoop_yarn_LogDeleterProto_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_LogDeleterProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_LogDeleterProto_descriptor,
        new java.lang.String[] { "User", "DeletionTime", });
    internal_static_hadoop_yarn_FlowContextProto_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_FlowContextProto_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_FlowContextProto_descriptor,
        new java.lang.String[] { "FlowName", "FlowVersion", "FlowRunId", });
    org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
  }
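  // Illustrative sketch, not generated output: the descriptors assigned above
  // are looked up by message-type index; they can equally be resolved by
  // name, e.g.:
  //
  //   org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor flowCtx =
  //       getDescriptor().findMessageTypeByName("FlowContextProto");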

  // @@protoc_insertion_point(outer_class_scope)
}