// CsiAdaptorProtos.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: yarn_csi_adaptor.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;

public final class CsiAdaptorProtos {
  private CsiAdaptorProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  public interface ValidateVolumeCapabilitiesRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    boolean hasVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    java.lang.String getVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes();

    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> 
        getVolumeCapabilitiesList();
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index);
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    int getVolumeCapabilitiesCount();
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> 
        getVolumeCapabilitiesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getVolumeAttributesList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    int getVolumeAttributesCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getVolumeAttributesOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesRequest}
   */
  public static final class ValidateVolumeCapabilitiesRequest extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
      ValidateVolumeCapabilitiesRequestOrBuilder {
  private static final long serialVersionUID = 0L;
    // Use ValidateVolumeCapabilitiesRequest.newBuilder() to construct.
    private ValidateVolumeCapabilitiesRequest(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ValidateVolumeCapabilitiesRequest() {
      volumeId_ = "";
      volumeCapabilities_ = java.util.Collections.emptyList();
      volumeAttributes_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ValidateVolumeCapabilitiesRequest();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.Builder.class);
    }

    private int bitField0_;
    public static final int VOLUME_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object volumeId_ = "";
    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    @java.lang.Override
    public boolean hasVolumeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    @java.lang.Override
    public java.lang.String getVolumeId() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          volumeId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        volumeId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VOLUME_CAPABILITIES_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> volumeCapabilities_;
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> getVolumeCapabilitiesList() {
      return volumeCapabilities_;
    }
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> 
        getVolumeCapabilitiesOrBuilderList() {
      return volumeCapabilities_;
    }
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    @java.lang.Override
    public int getVolumeCapabilitiesCount() {
      return volumeCapabilities_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index) {
      return volumeCapabilities_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
        int index) {
      return volumeCapabilities_.get(index);
    }

    public static final int VOLUME_ATTRIBUTES_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> volumeAttributes_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getVolumeAttributesList() {
      return volumeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getVolumeAttributesOrBuilderList() {
      return volumeAttributes_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    @java.lang.Override
    public int getVolumeAttributesCount() {
      return volumeAttributes_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index) {
      return volumeAttributes_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
        int index) {
      return volumeAttributes_.get(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasVolumeId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      for (int i = 0; i < getVolumeCapabilitiesCount(); i++) {
        if (!getVolumeCapabilities(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
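
    // Illustrative note (editorial assumption, not generated code): volume_id is a
    // required field, so isInitialized() above returns false whenever it is unset.
    // In that case Builder.build() throws an UninitializedMessageException, while
    // Builder.buildPartial() still returns the incomplete message, e.g.
    //
    //   ValidateVolumeCapabilitiesRequest partial =
    //       ValidateVolumeCapabilitiesRequest.newBuilder().buildPartial();
    //   assert !partial.isInitialized();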

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
      }
      for (int i = 0; i < volumeCapabilities_.size(); i++) {
        output.writeMessage(2, volumeCapabilities_.get(i));
      }
      for (int i = 0; i < volumeAttributes_.size(); i++) {
        output.writeMessage(3, volumeAttributes_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, volumeId_);
      }
      for (int i = 0; i < volumeCapabilities_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, volumeCapabilities_.get(i));
      }
      for (int i = 0; i < volumeAttributes_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(3, volumeAttributes_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest) obj;

      if (hasVolumeId() != other.hasVolumeId()) return false;
      if (hasVolumeId()) {
        if (!getVolumeId()
            .equals(other.getVolumeId())) return false;
      }
      if (!getVolumeCapabilitiesList()
          .equals(other.getVolumeCapabilitiesList())) return false;
      if (!getVolumeAttributesList()
          .equals(other.getVolumeAttributesList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasVolumeId()) {
        hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeId().hashCode();
      }
      if (getVolumeCapabilitiesCount() > 0) {
        hash = (37 * hash) + VOLUME_CAPABILITIES_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeCapabilitiesList().hashCode();
      }
      if (getVolumeAttributesCount() > 0) {
        hash = (37 * hash) + VOLUME_ATTRIBUTES_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeAttributesList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesRequest}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequestOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        volumeId_ = "";
        if (volumeCapabilitiesBuilder_ == null) {
          volumeCapabilities_ = java.util.Collections.emptyList();
        } else {
          volumeCapabilities_ = null;
          volumeCapabilitiesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        if (volumeAttributesBuilder_ == null) {
          volumeAttributes_ = java.util.Collections.emptyList();
        } else {
          volumeAttributes_ = null;
          volumeAttributesBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result) {
        if (volumeCapabilitiesBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            volumeCapabilities_ = java.util.Collections.unmodifiableList(volumeCapabilities_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.volumeCapabilities_ = volumeCapabilities_;
        } else {
          result.volumeCapabilities_ = volumeCapabilitiesBuilder_.build();
        }
        if (volumeAttributesBuilder_ == null) {
          if (((bitField0_ & 0x00000004) != 0)) {
            volumeAttributes_ = java.util.Collections.unmodifiableList(volumeAttributes_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.volumeAttributes_ = volumeAttributes_;
        } else {
          result.volumeAttributes_ = volumeAttributesBuilder_.build();
        }
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.volumeId_ = volumeId_;
          to_bitField0_ |= 0x00000001;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest.getDefaultInstance()) return this;
        if (other.hasVolumeId()) {
          volumeId_ = other.volumeId_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (volumeCapabilitiesBuilder_ == null) {
          if (!other.volumeCapabilities_.isEmpty()) {
            if (volumeCapabilities_.isEmpty()) {
              volumeCapabilities_ = other.volumeCapabilities_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureVolumeCapabilitiesIsMutable();
              volumeCapabilities_.addAll(other.volumeCapabilities_);
            }
            onChanged();
          }
        } else {
          if (!other.volumeCapabilities_.isEmpty()) {
            if (volumeCapabilitiesBuilder_.isEmpty()) {
              volumeCapabilitiesBuilder_.dispose();
              volumeCapabilitiesBuilder_ = null;
              volumeCapabilities_ = other.volumeCapabilities_;
              bitField0_ = (bitField0_ & ~0x00000002);
              volumeCapabilitiesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getVolumeCapabilitiesFieldBuilder() : null;
            } else {
              volumeCapabilitiesBuilder_.addAllMessages(other.volumeCapabilities_);
            }
          }
        }
        if (volumeAttributesBuilder_ == null) {
          if (!other.volumeAttributes_.isEmpty()) {
            if (volumeAttributes_.isEmpty()) {
              volumeAttributes_ = other.volumeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureVolumeAttributesIsMutable();
              volumeAttributes_.addAll(other.volumeAttributes_);
            }
            onChanged();
          }
        } else {
          if (!other.volumeAttributes_.isEmpty()) {
            if (volumeAttributesBuilder_.isEmpty()) {
              volumeAttributesBuilder_.dispose();
              volumeAttributesBuilder_ = null;
              volumeAttributes_ = other.volumeAttributes_;
              bitField0_ = (bitField0_ & ~0x00000004);
              volumeAttributesBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getVolumeAttributesFieldBuilder() : null;
            } else {
              volumeAttributesBuilder_.addAllMessages(other.volumeAttributes_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasVolumeId()) {
          return false;
        }
        for (int i = 0; i < getVolumeCapabilitiesCount(); i++) {
          if (!getVolumeCapabilities(i).isInitialized()) {
            return false;
          }
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                volumeId_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.PARSER,
                        extensionRegistry);
                if (volumeCapabilitiesBuilder_ == null) {
                  ensureVolumeCapabilitiesIsMutable();
                  volumeCapabilities_.add(m);
                } else {
                  volumeCapabilitiesBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 26: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (volumeAttributesBuilder_ == null) {
                  ensureVolumeAttributesIsMutable();
                  volumeAttributes_.add(m);
                } else {
                  volumeAttributesBuilder_.addMessage(m);
                }
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object volumeId_ = "";
      /**
       * <code>required string volume_id = 1;</code>
       * @return Whether the volumeId field is set.
       */
      public boolean hasVolumeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The volumeId.
       */
      public java.lang.String getVolumeId() {
        java.lang.Object ref = volumeId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            volumeId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The bytes for volumeId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getVolumeIdBytes() {
        java.lang.Object ref = volumeId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          volumeId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearVolumeId() {
        volumeId_ = getDefaultInstance().getVolumeId();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The bytes for volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> volumeCapabilities_ =
        java.util.Collections.emptyList();
      private void ensureVolumeCapabilitiesIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          volumeCapabilities_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability>(volumeCapabilities_);
          bitField0_ |= 0x00000002;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> volumeCapabilitiesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> getVolumeCapabilitiesList() {
        if (volumeCapabilitiesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(volumeCapabilities_);
        } else {
          return volumeCapabilitiesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public int getVolumeCapabilitiesCount() {
        if (volumeCapabilitiesBuilder_ == null) {
          return volumeCapabilities_.size();
        } else {
          return volumeCapabilitiesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapabilities(int index) {
        if (volumeCapabilitiesBuilder_ == null) {
          return volumeCapabilities_.get(index);
        } else {
          return volumeCapabilitiesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder setVolumeCapabilities(
          int index, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
        if (volumeCapabilitiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.set(index, value);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder setVolumeCapabilities(
          int index, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
        if (volumeCapabilitiesBuilder_ == null) {
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.set(index, builderForValue.build());
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder addVolumeCapabilities(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
        if (volumeCapabilitiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.add(value);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder addVolumeCapabilities(
          int index, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
        if (volumeCapabilitiesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.add(index, value);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder addVolumeCapabilities(
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
        if (volumeCapabilitiesBuilder_ == null) {
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.add(builderForValue.build());
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder addVolumeCapabilities(
          int index, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
        if (volumeCapabilitiesBuilder_ == null) {
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.add(index, builderForValue.build());
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder addAllVolumeCapabilities(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability> values) {
        if (volumeCapabilitiesBuilder_ == null) {
          ensureVolumeCapabilitiesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, volumeCapabilities_);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder clearVolumeCapabilities() {
        if (volumeCapabilitiesBuilder_ == null) {
          volumeCapabilities_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public Builder removeVolumeCapabilities(int index) {
        if (volumeCapabilitiesBuilder_ == null) {
          ensureVolumeCapabilitiesIsMutable();
          volumeCapabilities_.remove(index);
          onChanged();
        } else {
          volumeCapabilitiesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder getVolumeCapabilitiesBuilder(
          int index) {
        return getVolumeCapabilitiesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilitiesOrBuilder(
          int index) {
        if (volumeCapabilitiesBuilder_ == null) {
          return volumeCapabilities_.get(index);
        } else {
          return volumeCapabilitiesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> 
           getVolumeCapabilitiesOrBuilderList() {
        if (volumeCapabilitiesBuilder_ != null) {
          return volumeCapabilitiesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(volumeCapabilities_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder addVolumeCapabilitiesBuilder() {
        return getVolumeCapabilitiesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder addVolumeCapabilitiesBuilder(
          int index) {
        return getVolumeCapabilitiesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.VolumeCapability volume_capabilities = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder> 
           getVolumeCapabilitiesBuilderList() {
        return getVolumeCapabilitiesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> 
          getVolumeCapabilitiesFieldBuilder() {
        if (volumeCapabilitiesBuilder_ == null) {
          volumeCapabilitiesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>(
                  volumeCapabilities_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          volumeCapabilities_ = null;
        }
        return volumeCapabilitiesBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> volumeAttributes_ =
        java.util.Collections.emptyList();
      private void ensureVolumeAttributesIsMutable() {
        if (!((bitField0_ & 0x00000004) != 0)) {
          volumeAttributes_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(volumeAttributes_);
          bitField0_ |= 0x00000004;
         }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> volumeAttributesBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getVolumeAttributesList() {
        if (volumeAttributesBuilder_ == null) {
          return java.util.Collections.unmodifiableList(volumeAttributes_);
        } else {
          return volumeAttributesBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public int getVolumeAttributesCount() {
        if (volumeAttributesBuilder_ == null) {
          return volumeAttributes_.size();
        } else {
          return volumeAttributesBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeAttributes(int index) {
        if (volumeAttributesBuilder_ == null) {
          return volumeAttributes_.get(index);
        } else {
          return volumeAttributesBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder setVolumeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.set(index, value);
          onChanged();
        } else {
          volumeAttributesBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder setVolumeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeAttributesBuilder_ == null) {
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.set(index, builderForValue.build());
          onChanged();
        } else {
          volumeAttributesBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder addVolumeAttributes(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.add(value);
          onChanged();
        } else {
          volumeAttributesBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder addVolumeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeAttributesBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.add(index, value);
          onChanged();
        } else {
          volumeAttributesBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder addVolumeAttributes(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeAttributesBuilder_ == null) {
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.add(builderForValue.build());
          onChanged();
        } else {
          volumeAttributesBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder addVolumeAttributes(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeAttributesBuilder_ == null) {
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.add(index, builderForValue.build());
          onChanged();
        } else {
          volumeAttributesBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder addAllVolumeAttributes(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (volumeAttributesBuilder_ == null) {
          ensureVolumeAttributesIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, volumeAttributes_);
          onChanged();
        } else {
          volumeAttributesBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder clearVolumeAttributes() {
        if (volumeAttributesBuilder_ == null) {
          volumeAttributes_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          volumeAttributesBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public Builder removeVolumeAttributes(int index) {
        if (volumeAttributesBuilder_ == null) {
          ensureVolumeAttributesIsMutable();
          volumeAttributes_.remove(index);
          onChanged();
        } else {
          volumeAttributesBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getVolumeAttributesBuilder(
          int index) {
        return getVolumeAttributesFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeAttributesOrBuilder(
          int index) {
        if (volumeAttributesBuilder_ == null) {
          return volumeAttributes_.get(index);
        } else {
          return volumeAttributesBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getVolumeAttributesOrBuilderList() {
        if (volumeAttributesBuilder_ != null) {
          return volumeAttributesBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(volumeAttributes_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeAttributesBuilder() {
        return getVolumeAttributesFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeAttributesBuilder(
          int index) {
        return getVolumeAttributesFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_attributes = 3;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getVolumeAttributesBuilderList() {
        return getVolumeAttributesFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getVolumeAttributesFieldBuilder() {
        if (volumeAttributesBuilder_ == null) {
          volumeAttributesBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  volumeAttributes_,
                  ((bitField0_ & 0x00000004) != 0),
                  getParentForChildren(),
                  isClean());
          volumeAttributes_ = null;
        }
        return volumeAttributesBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ValidateVolumeCapabilitiesRequest)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesRequest>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ValidateVolumeCapabilitiesRequest>() {
      @java.lang.Override
      public ValidateVolumeCapabilitiesRequest parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesRequest> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
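    /*
     * Usage sketch (illustrative only): a previously serialized request can be
     * re-materialized through the PARSER exposed above; `wireBytes` is a placeholder
     * for bytes produced earlier by writeTo() or toByteArray().
     *
     *   byte[] wireBytes = ...;
     *   ValidateVolumeCapabilitiesRequest req =
     *       ValidateVolumeCapabilitiesRequest.parser().parseFrom(wireBytes);
     */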

  }

  public interface ValidateVolumeCapabilitiesResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     * @return Whether the supported field is set.
     */
    boolean hasSupported();
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     * @return The supported.
     */
    boolean getSupported();

    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return Whether the message field is set.
     */
    boolean hasMessage();
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return The message.
     */
    java.lang.String getMessage();
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return The bytes for message.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getMessageBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesResponse}
   */
  public static final class ValidateVolumeCapabilitiesResponse extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
      ValidateVolumeCapabilitiesResponseOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use ValidateVolumeCapabilitiesResponse.newBuilder() to construct.
    private ValidateVolumeCapabilitiesResponse(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private ValidateVolumeCapabilitiesResponse() {
      message_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new ValidateVolumeCapabilitiesResponse();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.Builder.class);
    }

    private int bitField0_;
    public static final int SUPPORTED_FIELD_NUMBER = 1;
    private boolean supported_ = false;
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     * @return Whether the supported field is set.
     */
    @java.lang.Override
    public boolean hasSupported() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <pre>
     * True if the Plugin supports the specified capabilities for the
     * given volume. This field is REQUIRED.
     * </pre>
     *
     * <code>required bool supported = 1;</code>
     * @return The supported.
     */
    @java.lang.Override
    public boolean getSupported() {
      return supported_;
    }

    public static final int MESSAGE_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object message_ = "";
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return Whether the message field is set.
     */
    @java.lang.Override
    public boolean hasMessage() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return The message.
     */
    @java.lang.Override
    public java.lang.String getMessage() {
      java.lang.Object ref = message_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          message_ = s;
        }
        return s;
      }
    }
    /**
     * <pre>
     * Message to the CO if `supported` above is false. This field is
     * OPTIONAL.
     * An empty string is equal to an unspecified field value.
     * </pre>
     *
     * <code>optional string message = 2;</code>
     * @return The bytes for message.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getMessageBytes() {
      java.lang.Object ref = message_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        message_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasSupported()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeBool(1, supported_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, message_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(1, supported_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, message_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) obj;

      if (hasSupported() != other.hasSupported()) return false;
      if (hasSupported()) {
        if (getSupported()
            != other.getSupported()) return false;
      }
      if (hasMessage() != other.hasMessage()) return false;
      if (hasMessage()) {
        if (!getMessage()
            .equals(other.getMessage())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasSupported()) {
        hash = (37 * hash) + SUPPORTED_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getSupported());
      }
      if (hasMessage()) {
        hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
        hash = (53 * hash) + getMessage().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.ValidateVolumeCapabilitiesResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponseOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        supported_ = false;
        message_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.supported_ = supported_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.message_ = message_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse.getDefaultInstance()) return this;
        if (other.hasSupported()) {
          setSupported(other.getSupported());
        }
        if (other.hasMessage()) {
          message_ = other.message_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasSupported()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                supported_ = input.readBool();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
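              // Field 2 (`message`) is kept as the raw ByteString read off the wire;
              // UTF-8 decoding is deferred until getMessage() is first called.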
              case 18: {
                message_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private boolean supported_ ;
      /**
       * <pre>
       * True if the Plugin supports the specified capabilities for the
       * given volume. This field is REQUIRED.
       * </pre>
       *
       * <code>required bool supported = 1;</code>
       * @return Whether the supported field is set.
       */
      @java.lang.Override
      public boolean hasSupported() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <pre>
       * True if the Plugin supports the specified capabilities for the
       * given volume. This field is REQUIRED.
       * </pre>
       *
       * <code>required bool supported = 1;</code>
       * @return The supported.
       */
      @java.lang.Override
      public boolean getSupported() {
        return supported_;
      }
      /**
       * <pre>
       * True if the Plugin supports the specified capabilities for the
       * given volume. This field is REQUIRED.
       * </pre>
       *
       * <code>required bool supported = 1;</code>
       * @param value The supported to set.
       * @return This builder for chaining.
       */
      public Builder setSupported(boolean value) {

        supported_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * True if the Plugin supports the specified capabilities for the
       * given volume. This field is REQUIRED.
       * </pre>
       *
       * <code>required bool supported = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearSupported() {
        bitField0_ = (bitField0_ & ~0x00000001);
        supported_ = false;
        onChanged();
        return this;
      }

      private java.lang.Object message_ = "";
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @return Whether the message field is set.
       */
      public boolean hasMessage() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @return The message.
       */
      public java.lang.String getMessage() {
        java.lang.Object ref = message_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            message_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @return The bytes for message.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getMessageBytes() {
        java.lang.Object ref = message_;
        if (ref instanceof java.lang.String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          message_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @param value The message to set.
       * @return This builder for chaining.
       */
      public Builder setMessage(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        message_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearMessage() {
        message_ = getDefaultInstance().getMessage();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <pre>
       * Message to the CO if `supported` above is false. This field is
       * OPTIONAL.
       * An empty string is equal to an unspecified field value.
       * </pre>
       *
       * <code>optional string message = 2;</code>
       * @param value The bytes for message to set.
       * @return This builder for chaining.
       */
      public Builder setMessageBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        message_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.ValidateVolumeCapabilitiesResponse)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesResponse>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<ValidateVolumeCapabilitiesResponse>() {
      @java.lang.Override
      public ValidateVolumeCapabilitiesResponse parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<ValidateVolumeCapabilitiesResponse> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.ValidateVolumeCapabilitiesResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }
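    /*
     * Usage sketch (illustrative only): constructing a response with the generated
     * Builder. `supported` is a required field, so build() throws an
     * UninitializedMessageException if setSupported() was never called; `message`
     * is optional and may be omitted.
     *
     *   ValidateVolumeCapabilitiesResponse resp =
     *       ValidateVolumeCapabilitiesResponse.newBuilder()
     *           .setSupported(false)
     *           .setMessage("requested capabilities are not supported")
     *           .build();
     */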

  }

  public interface VolumeCapabilityOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.VolumeCapability)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
     * @return Whether the volumeType field is set.
     */
    boolean hasVolumeType();
    /**
     * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
     * @return The volumeType.
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType();

    /**
     * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
     * @return Whether the accessMode field is set.
     */
    boolean hasAccessMode();
    /**
     * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
     * @return The accessMode.
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode();

    /**
     * <code>repeated string mount_flags = 3;</code>
     * @return A list containing the mountFlags.
     */
    java.util.List<java.lang.String>
        getMountFlagsList();
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @return The count of mountFlags.
     */
    int getMountFlagsCount();
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @param index The index of the element to return.
     * @return The mountFlags at the given index.
     */
    java.lang.String getMountFlags(int index);
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the mountFlags at the given index.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getMountFlagsBytes(int index);
  }
  /**
   * Protobuf type {@code hadoop.yarn.VolumeCapability}
   */
  public static final class VolumeCapability extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.VolumeCapability)
      VolumeCapabilityOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use VolumeCapability.newBuilder() to construct.
    private VolumeCapability(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private VolumeCapability() {
      volumeType_ = 0;
      accessMode_ = 0;
      mountFlags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new VolumeCapability();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder.class);
    }

    /**
     * Protobuf enum {@code hadoop.yarn.VolumeCapability.VolumeType}
     */
    public enum VolumeType
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * <code>BLOCK = 0;</code>
       */
      BLOCK(0),
      /**
       * <code>FILE_SYSTEM = 1;</code>
       */
      FILE_SYSTEM(1),
      ;

      /**
       * <code>BLOCK = 0;</code>
       */
      public static final int BLOCK_VALUE = 0;
      /**
       * <code>FILE_SYSTEM = 1;</code>
       */
      public static final int FILE_SYSTEM_VALUE = 1;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static VolumeType valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       */
      public static VolumeType forNumber(int value) {
        switch (value) {
          case 0: return BLOCK;
          case 1: return FILE_SYSTEM;
          default: return null;
        }
      }

      public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<VolumeType>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
          VolumeType> internalValueMap =
            new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<VolumeType>() {
              public VolumeType findValueByNumber(int number) {
                return VolumeType.forNumber(number);
              }
            };

      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDescriptor().getEnumTypes().get(0);
      }

      private static final VolumeType[] VALUES = values();

      public static VolumeType valueOf(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int value;

      private VolumeType(int value) {
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.yarn.VolumeCapability.VolumeType)
    }
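    /*
     * Usage sketch (illustrative only): converting raw wire values back to VolumeType
     * constants. forNumber() returns null for numbers this enum does not define, so
     * callers must handle that case; the deprecated valueOf(int) simply delegates to it.
     *
     *   VolumeType t = VolumeType.forNumber(1); // FILE_SYSTEM
     *   VolumeType u = VolumeType.forNumber(7); // null
     */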

    /**
     * Protobuf enum {@code hadoop.yarn.VolumeCapability.AccessMode}
     */
    public enum AccessMode
        implements org.apache.hadoop.thirdparty.protobuf.ProtocolMessageEnum {
      /**
       * <code>UNKNOWN = 0;</code>
       */
      UNKNOWN(0),
      /**
       * <code>SINGLE_NODE_WRITER = 1;</code>
       */
      SINGLE_NODE_WRITER(1),
      /**
       * <code>SINGLE_NODE_READER_ONLY = 2;</code>
       */
      SINGLE_NODE_READER_ONLY(2),
      /**
       * <code>MULTI_NODE_READER_ONLY = 3;</code>
       */
      MULTI_NODE_READER_ONLY(3),
      /**
       * <code>MULTI_NODE_SINGLE_WRITER = 4;</code>
       */
      MULTI_NODE_SINGLE_WRITER(4),
      /**
       * <code>MULTI_NODE_MULTI_WRITER = 5;</code>
       */
      MULTI_NODE_MULTI_WRITER(5),
      ;

      /**
       * <code>UNKNOWN = 0;</code>
       */
      public static final int UNKNOWN_VALUE = 0;
      /**
       * <code>SINGLE_NODE_WRITER = 1;</code>
       */
      public static final int SINGLE_NODE_WRITER_VALUE = 1;
      /**
       * <code>SINGLE_NODE_READER_ONLY = 2;</code>
       */
      public static final int SINGLE_NODE_READER_ONLY_VALUE = 2;
      /**
       * <code>MULTI_NODE_READER_ONLY = 3;</code>
       */
      public static final int MULTI_NODE_READER_ONLY_VALUE = 3;
      /**
       * <code>MULTI_NODE_SINGLE_WRITER = 4;</code>
       */
      public static final int MULTI_NODE_SINGLE_WRITER_VALUE = 4;
      /**
       * <code>MULTI_NODE_MULTI_WRITER = 5;</code>
       */
      public static final int MULTI_NODE_MULTI_WRITER_VALUE = 5;


      public final int getNumber() {
        return value;
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       * @deprecated Use {@link #forNumber(int)} instead.
       */
      @java.lang.Deprecated
      public static AccessMode valueOf(int value) {
        return forNumber(value);
      }

      /**
       * @param value The numeric wire value of the corresponding enum entry.
       * @return The enum associated with the given numeric wire value.
       */
      public static AccessMode forNumber(int value) {
        switch (value) {
          case 0: return UNKNOWN;
          case 1: return SINGLE_NODE_WRITER;
          case 2: return SINGLE_NODE_READER_ONLY;
          case 3: return MULTI_NODE_READER_ONLY;
          case 4: return MULTI_NODE_SINGLE_WRITER;
          case 5: return MULTI_NODE_MULTI_WRITER;
          default: return null;
        }
      }

      public static org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AccessMode>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static final org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<
          AccessMode> internalValueMap =
            new org.apache.hadoop.thirdparty.protobuf.Internal.EnumLiteMap<AccessMode>() {
              public AccessMode findValueByNumber(int number) {
                return AccessMode.forNumber(number);
              }
            };

      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(ordinal());
      }
      public final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDescriptor().getEnumTypes().get(1);
      }

      private static final AccessMode[] VALUES = values();

      public static AccessMode valueOf(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      private final int value;

      private AccessMode(int value) {
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:hadoop.yarn.VolumeCapability.AccessMode)
    }

    private int bitField0_;
    public static final int VOLUME_TYPE_FIELD_NUMBER = 1;
    private int volumeType_ = 0;
    /**
     * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
     * @return Whether the volumeType field is set.
     */
    @java.lang.Override public boolean hasVolumeType() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
     * @return The volumeType.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType() {
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType result = org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.forNumber(volumeType_);
      return result == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.BLOCK : result;
    }

    public static final int ACCESS_MODE_FIELD_NUMBER = 2;
    private int accessMode_ = 0;
    /**
     * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
     * @return Whether the accessMode field is set.
     */
    @java.lang.Override public boolean hasAccessMode() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
     * @return The accessMode.
     */
    @java.lang.Override public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode() {
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode result = org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.forNumber(accessMode_);
      return result == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.UNKNOWN : result;
    }
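    /*
     * Note: accessMode_ stores the raw enum number; getAccessMode() above resolves it
     * lazily and substitutes AccessMode.UNKNOWN when the stored value does not match a
     * defined constant, so the accessor never returns null. Illustrative use
     * (`capability` is a placeholder instance):
     *
     *   VolumeCapability.AccessMode mode = capability.getAccessMode();
     */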

    public static final int MOUNT_FLAGS_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList mountFlags_ =
        org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @return A list containing the mountFlags.
     */
    public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
        getMountFlagsList() {
      return mountFlags_;
    }
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @return The count of mountFlags.
     */
    public int getMountFlagsCount() {
      return mountFlags_.size();
    }
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @param index The index of the element to return.
     * @return The mountFlags at the given index.
     */
    public java.lang.String getMountFlags(int index) {
      return mountFlags_.get(index);
    }
    /**
     * <code>repeated string mount_flags = 3;</code>
     * @param index The index of the value to return.
     * @return The bytes of the mountFlags at the given index.
     */
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getMountFlagsBytes(int index) {
      return mountFlags_.getByteString(index);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasVolumeType()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasAccessMode()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        output.writeEnum(1, volumeType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        output.writeEnum(2, accessMode_);
      }
      for (int i = 0; i < mountFlags_.size(); i++) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, mountFlags_.getRaw(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(1, volumeType_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeEnumSize(2, accessMode_);
      }
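      // Repeated string sizing: each mount_flags entry contributes its length-delimited
      // payload (computeStringSizeNoTag) plus one byte for the field-3 tag, which is the
      // `1 * getMountFlagsList().size()` term below.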
      {
        int dataSize = 0;
        for (int i = 0; i < mountFlags_.size(); i++) {
          dataSize += computeStringSizeNoTag(mountFlags_.getRaw(i));
        }
        size += dataSize;
        size += 1 * getMountFlagsList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability) obj;

      if (hasVolumeType() != other.hasVolumeType()) return false;
      if (hasVolumeType()) {
        if (volumeType_ != other.volumeType_) return false;
      }
      if (hasAccessMode() != other.hasAccessMode()) return false;
      if (hasAccessMode()) {
        if (accessMode_ != other.accessMode_) return false;
      }
      if (!getMountFlagsList()
          .equals(other.getMountFlagsList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasVolumeType()) {
        hash = (37 * hash) + VOLUME_TYPE_FIELD_NUMBER;
        hash = (53 * hash) + volumeType_;
      }
      if (hasAccessMode()) {
        hash = (37 * hash) + ACCESS_MODE_FIELD_NUMBER;
        hash = (53 * hash) + accessMode_;
      }
      if (getMountFlagsCount() > 0) {
        hash = (37 * hash) + MOUNT_FLAGS_FIELD_NUMBER;
        hash = (53 * hash) + getMountFlagsList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.VolumeCapability}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.VolumeCapability)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        volumeType_ = 0;
        accessMode_ = 0;
        mountFlags_ =
            org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_VolumeCapability_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.volumeType_ = volumeType_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.accessMode_ = accessMode_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          mountFlags_.makeImmutable();
          result.mountFlags_ = mountFlags_;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance()) return this;
        if (other.hasVolumeType()) {
          setVolumeType(other.getVolumeType());
        }
        if (other.hasAccessMode()) {
          setAccessMode(other.getAccessMode());
        }
        if (!other.mountFlags_.isEmpty()) {
          if (mountFlags_.isEmpty()) {
            mountFlags_ = other.mountFlags_;
            bitField0_ |= 0x00000004;
          } else {
            ensureMountFlagsIsMutable();
            mountFlags_.addAll(other.mountFlags_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasVolumeType()) {
          return false;
        }
        if (!hasAccessMode()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 8: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType tmpValue =
                    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(1, tmpRaw);
                } else {
                  volumeType_ = tmpRaw;
                  bitField0_ |= 0x00000001;
                }
                break;
              } // case 8
              case 16: {
                int tmpRaw = input.readEnum();
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode tmpValue =
                    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.forNumber(tmpRaw);
                if (tmpValue == null) {
                  mergeUnknownVarintField(2, tmpRaw);
                } else {
                  accessMode_ = tmpRaw;
                  bitField0_ |= 0x00000002;
                }
                break;
              } // case 16
              case 26: {
                org.apache.hadoop.thirdparty.protobuf.ByteString bs = input.readBytes();
                ensureMountFlagsIsMutable();
                mountFlags_.add(bs);
                break;
              } // case 26
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
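      // Wire-format note for mergeFrom above: a tag encodes
      // (field_number << 3) | wire_type, so tag 8 is field 1 (volume_type,
      // varint), tag 16 is field 2 (access_mode, varint) and tag 26 is field 3
      // (mount_flags, length-delimited). Enum numbers that do not map to a
      // known constant are preserved as unknown varint fields rather than dropped.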
      private int bitField0_;

      private int volumeType_ = 0;
      /**
       * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
       * @return Whether the volumeType field is set.
       */
      @java.lang.Override public boolean hasVolumeType() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
       * @return The volumeType.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType getVolumeType() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType result = org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.forNumber(volumeType_);
        return result == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType.BLOCK : result;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
       * @param value The volumeType to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeType(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.VolumeType value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        volumeType_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.VolumeType volume_type = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearVolumeType() {
        bitField0_ = (bitField0_ & ~0x00000001);
        volumeType_ = 0;
        onChanged();
        return this;
      }

      private int accessMode_ = 0;
      /**
       * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
       * @return Whether the accessMode field is set.
       */
      @java.lang.Override public boolean hasAccessMode() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
       * @return The accessMode.
       */
      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode getAccessMode() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode result = org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.forNumber(accessMode_);
        return result == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode.UNKNOWN : result;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
       * @param value The accessMode to set.
       * @return This builder for chaining.
       */
      public Builder setAccessMode(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.AccessMode value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        accessMode_ = value.getNumber();
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability.AccessMode access_mode = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearAccessMode() {
        bitField0_ = (bitField0_ & ~0x00000002);
        accessMode_ = 0;
        onChanged();
        return this;
      }

      private org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList mountFlags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
      private void ensureMountFlagsIsMutable() {
        if (!mountFlags_.isModifiable()) {
          mountFlags_ = new org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList(mountFlags_);
        }
        bitField0_ |= 0x00000004;
      }
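      // mountFlags_ follows copy-on-write semantics: the shared empty list (or a
      // list adopted wholesale from another message in mergeFrom) stays immutable
      // until a mutator calls ensureMountFlagsIsMutable(), which copies it into a
      // fresh LazyStringArrayList and marks bit 0x4 in bitField0_.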
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @return A list containing the mountFlags.
       */
      public org.apache.hadoop.thirdparty.protobuf.ProtocolStringList
          getMountFlagsList() {
        mountFlags_.makeImmutable();
        return mountFlags_;
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @return The count of mountFlags.
       */
      public int getMountFlagsCount() {
        return mountFlags_.size();
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param index The index of the element to return.
       * @return The mountFlags at the given index.
       */
      public java.lang.String getMountFlags(int index) {
        return mountFlags_.get(index);
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param index The index of the value to return.
       * @return The bytes of the mountFlags at the given index.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getMountFlagsBytes(int index) {
        return mountFlags_.getByteString(index);
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param index The index to set the value at.
       * @param value The mountFlags to set.
       * @return This builder for chaining.
       */
      public Builder setMountFlags(
          int index, java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureMountFlagsIsMutable();
        mountFlags_.set(index, value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param value The mountFlags to add.
       * @return This builder for chaining.
       */
      public Builder addMountFlags(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        ensureMountFlagsIsMutable();
        mountFlags_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param values The mountFlags to add.
       * @return This builder for chaining.
       */
      public Builder addAllMountFlags(
          java.lang.Iterable<java.lang.String> values) {
        ensureMountFlagsIsMutable();
        org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
            values, mountFlags_);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearMountFlags() {
        mountFlags_ =
          org.apache.hadoop.thirdparty.protobuf.LazyStringArrayList.emptyList();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string mount_flags = 3;</code>
       * @param value The bytes of the mountFlags to add.
       * @return This builder for chaining.
       */
      public Builder addMountFlagsBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        ensureMountFlagsIsMutable();
        mountFlags_.add(value);
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.VolumeCapability)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.VolumeCapability)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<VolumeCapability>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<VolumeCapability>() {
      @java.lang.Override
      public VolumeCapability parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<VolumeCapability> parser() {
      return PARSER;
    }
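    // The static PARSER field above is deprecated in favor of the parser()
    // accessor; getParserForType() and the static parseFrom(...) factories all
    // delegate to the same parser instance.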

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<VolumeCapability> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
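
  // Illustrative only (not part of the generated API surface): a VolumeCapability
  // is normally assembled through its Builder, e.g.
  //
  //   CsiAdaptorProtos.VolumeCapability capability =
  //       CsiAdaptorProtos.VolumeCapability.newBuilder()
  //           .setVolumeType(CsiAdaptorProtos.VolumeCapability.VolumeType.BLOCK)
  //           .setAccessMode(CsiAdaptorProtos.VolumeCapability.AccessMode.UNKNOWN)
  //           .addMountFlags("ro")  // repeated string mount_flags = 3
  //           .build();
  //
  // BLOCK and UNKNOWN are the fallback constants used by getVolumeType() and
  // getAccessMode(); the .proto may define additional values. build() throws via
  // isInitialized() if the required volume_type or access_mode field is unset,
  // whereas buildPartial() skips that check.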

  public interface GetPluginInfoRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetPluginInfoRequest)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * <pre>
   * Intentionally empty.
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.GetPluginInfoRequest}
   */
  public static final class GetPluginInfoRequest extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetPluginInfoRequest)
      GetPluginInfoRequestOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetPluginInfoRequest.newBuilder() to construct.
    private GetPluginInfoRequest(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetPluginInfoRequest() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetPluginInfoRequest();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Intentionally empty.
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.GetPluginInfoRequest}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetPluginInfoRequest)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequestOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetPluginInfoRequest)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetPluginInfoRequest)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetPluginInfoRequest>() {
      @java.lang.Override
      public GetPluginInfoRequest parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoRequest> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
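
  // Illustrative only: GetPluginInfoRequest carries no fields, so callers
  // typically reuse the shared singleton rather than building a new message:
  //
  //   CsiAdaptorProtos.GetPluginInfoRequest request =
  //       CsiAdaptorProtos.GetPluginInfoRequest.getDefaultInstance();
  //
  // newBuilder().build() would produce an equivalent (empty) message.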

  public interface GetPluginInfoResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.GetPluginInfoResponse)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    boolean hasName();
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    java.lang.String getName();
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes();

    /**
     * <code>required string vendor_version = 2;</code>
     * @return Whether the vendorVersion field is set.
     */
    boolean hasVendorVersion();
    /**
     * <code>required string vendor_version = 2;</code>
     * @return The vendorVersion.
     */
    java.lang.String getVendorVersion();
    /**
     * <code>required string vendor_version = 2;</code>
     * @return The bytes for vendorVersion.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getVendorVersionBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.GetPluginInfoResponse}
   */
  public static final class GetPluginInfoResponse extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.GetPluginInfoResponse)
      GetPluginInfoResponseOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use GetPluginInfoResponse.newBuilder() to construct.
    private GetPluginInfoResponse(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private GetPluginInfoResponse() {
      name_ = "";
      vendorVersion_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new GetPluginInfoResponse();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.Builder.class);
    }

    private int bitField0_;
    public static final int NAME_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object name_ = "";
    /**
     * <code>required string name = 1;</code>
     * @return Whether the name field is set.
     */
    @java.lang.Override
    public boolean hasName() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string name = 1;</code>
     * @return The name.
     */
    @java.lang.Override
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          name_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string name = 1;</code>
     * @return The bytes for name.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VENDOR_VERSION_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object vendorVersion_ = "";
    /**
     * <code>required string vendor_version = 2;</code>
     * @return Whether the vendorVersion field is set.
     */
    @java.lang.Override
    public boolean hasVendorVersion() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required string vendor_version = 2;</code>
     * @return The vendorVersion.
     */
    @java.lang.Override
    public java.lang.String getVendorVersion() {
      java.lang.Object ref = vendorVersion_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          vendorVersion_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string vendor_version = 2;</code>
     * @return The bytes for vendorVersion.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getVendorVersionBytes() {
      java.lang.Object ref = vendorVersion_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        vendorVersion_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasVendorVersion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, vendorVersion_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, vendorVersion_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }
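
    // getSerializedSize() caches its result in memoizedSize, and writeTo()
    // emits only the fields whose presence bits are set in bitField0_, followed
    // by any unknown fields retained from parsing.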

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) obj;

      if (hasName() != other.hasName()) return false;
      if (hasName()) {
        if (!getName()
            .equals(other.getName())) return false;
      }
      if (hasVendorVersion() != other.hasVendorVersion()) return false;
      if (hasVendorVersion()) {
        if (!getVendorVersion()
            .equals(other.getVendorVersion())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasName()) {
        hash = (37 * hash) + NAME_FIELD_NUMBER;
        hash = (53 * hash) + getName().hashCode();
      }
      if (hasVendorVersion()) {
        hash = (37 * hash) + VENDOR_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getVendorVersion().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.GetPluginInfoResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.GetPluginInfoResponse)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponseOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        name_ = "";
        vendorVersion_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.name_ = name_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.vendorVersion_ = vendorVersion_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse.getDefaultInstance()) return this;
        if (other.hasName()) {
          name_ = other.name_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasVendorVersion()) {
          vendorVersion_ = other.vendorVersion_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasName()) {
          return false;
        }
        if (!hasVendorVersion()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                name_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                vendorVersion_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object name_ = "";
      /**
       * <code>required string name = 1;</code>
       * @return Whether the name field is set.
       */
      public boolean hasName() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string name = 1;</code>
       * @return The name.
       */
      public java.lang.String getName() {
        java.lang.Object ref = name_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            name_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @return The bytes for name.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getNameBytes() {
        java.lang.Object ref = name_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          name_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The name to set.
       * @return This builder for chaining.
       */
      public Builder setName(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearName() {
        name_ = getDefaultInstance().getName();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string name = 1;</code>
       * @param value The bytes for name to set.
       * @return This builder for chaining.
       */
      public Builder setNameBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        name_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object vendorVersion_ = "";
      /**
       * <code>required string vendor_version = 2;</code>
       * @return Whether the vendorVersion field is set.
       */
      public boolean hasVendorVersion() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required string vendor_version = 2;</code>
       * @return The vendorVersion.
       */
      public java.lang.String getVendorVersion() {
        java.lang.Object ref = vendorVersion_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            vendorVersion_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string vendor_version = 2;</code>
       * @return The bytes for vendorVersion.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getVendorVersionBytes() {
        java.lang.Object ref = vendorVersion_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          vendorVersion_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string vendor_version = 2;</code>
       * @param value The vendorVersion to set.
       * @return This builder for chaining.
       */
      public Builder setVendorVersion(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        vendorVersion_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required string vendor_version = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearVendorVersion() {
        vendorVersion_ = getDefaultInstance().getVendorVersion();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>required string vendor_version = 2;</code>
       * @param value The bytes for vendorVersion to set.
       * @return This builder for chaining.
       */
      public Builder setVendorVersionBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        vendorVersion_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.GetPluginInfoResponse)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.GetPluginInfoResponse)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

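    // Note: the static PARSER field is deprecated in generated code; callers are
    // expected to go through parser() or getParserForType() instead. parsePartialFrom()
    // below returns builder.buildPartial(), and on failure the partially parsed message
    // is attached to the thrown InvalidProtocolBufferException as the "unfinished" message.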
    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoResponse>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<GetPluginInfoResponse>() {
      @java.lang.Override
      public GetPluginInfoResponse parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<GetPluginInfoResponse> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.GetPluginInfoResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

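  // NodePublishVolumeRequest carries the arguments the YARN CSI adaptor forwards for a
  // node-publish (mount) call: the volume id, an optional staging path, the target path,
  // the requested volume capability, a read-only flag, plus publish/volume context and
  // secrets as repeated key-value pairs. The field set mirrors the CSI NodePublishVolume
  // request.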
  public interface NodePublishVolumeRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodePublishVolumeRequest)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    boolean hasVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    java.lang.String getVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes();

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getPublishContextList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    int getPublishContextCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getPublishContextOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
        int index);

    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return Whether the stagingTargetPath field is set.
     */
    boolean hasStagingTargetPath();
    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return The stagingTargetPath.
     */
    java.lang.String getStagingTargetPath();
    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return The bytes for stagingTargetPath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getStagingTargetPathBytes();

    /**
     * <code>required string target_path = 4;</code>
     * @return Whether the targetPath field is set.
     */
    boolean hasTargetPath();
    /**
     * <code>required string target_path = 4;</code>
     * @return The targetPath.
     */
    java.lang.String getTargetPath();
    /**
     * <code>required string target_path = 4;</code>
     * @return The bytes for targetPath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetPathBytes();

    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     * @return Whether the volumeCapability field is set.
     */
    boolean hasVolumeCapability();
    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     * @return The volumeCapability.
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability();
    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     */
    org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder();

    /**
     * <code>required bool readonly = 6;</code>
     * @return Whether the readonly field is set.
     */
    boolean hasReadonly();
    /**
     * <code>required bool readonly = 6;</code>
     * @return The readonly.
     */
    boolean getReadonly();

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getSecretsList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    int getSecretsCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getSecretsOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
        int index);

    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> 
        getVolumeContextList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index);
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    int getVolumeContextCount();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getVolumeContextOrBuilderList();
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
        int index);
  }
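  // A minimal construction sketch (illustrative values only, not taken from this file):
  //
  //   NodePublishVolumeRequest request = NodePublishVolumeRequest.newBuilder()
  //       .setVolumeId("vol-0001")
  //       .setTargetPath("/var/lib/yarn/volumes/vol-0001")
  //       .setVolumeCapability(capability)   // a fully initialized VolumeCapability
  //       .setReadonly(false)
  //       .build();
  //
  // build() throws if any required field (volume_id, target_path, volume_capability,
  // readonly) is missing or if the nested capability is itself uninitialized; see
  // isInitialized() below.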
  /**
   * Protobuf type {@code hadoop.yarn.NodePublishVolumeRequest}
   */
  public static final class NodePublishVolumeRequest extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodePublishVolumeRequest)
      NodePublishVolumeRequestOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodePublishVolumeRequest.newBuilder() to construct.
    private NodePublishVolumeRequest(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodePublishVolumeRequest() {
      volumeId_ = "";
      publishContext_ = java.util.Collections.emptyList();
      stagingTargetPath_ = "";
      targetPath_ = "";
      secrets_ = java.util.Collections.emptyList();
      volumeContext_ = java.util.Collections.emptyList();
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodePublishVolumeRequest();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.Builder.class);
    }

    private int bitField0_;
    public static final int VOLUME_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object volumeId_ = "";
    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    @java.lang.Override
    public boolean hasVolumeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    @java.lang.Override
    public java.lang.String getVolumeId() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          volumeId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        volumeId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int PUBLISH_CONTEXT_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> publishContext_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getPublishContextList() {
      return publishContext_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getPublishContextOrBuilderList() {
      return publishContext_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    @java.lang.Override
    public int getPublishContextCount() {
      return publishContext_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index) {
      return publishContext_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
        int index) {
      return publishContext_.get(index);
    }

    public static final int STAGING_TARGET_PATH_FIELD_NUMBER = 3;
    @SuppressWarnings("serial")
    private volatile java.lang.Object stagingTargetPath_ = "";
    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return Whether the stagingTargetPath field is set.
     */
    @java.lang.Override
    public boolean hasStagingTargetPath() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return The stagingTargetPath.
     */
    @java.lang.Override
    public java.lang.String getStagingTargetPath() {
      java.lang.Object ref = stagingTargetPath_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          stagingTargetPath_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string staging_target_path = 3;</code>
     * @return The bytes for stagingTargetPath.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getStagingTargetPathBytes() {
      java.lang.Object ref = stagingTargetPath_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        stagingTargetPath_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TARGET_PATH_FIELD_NUMBER = 4;
    @SuppressWarnings("serial")
    private volatile java.lang.Object targetPath_ = "";
    /**
     * <code>required string target_path = 4;</code>
     * @return Whether the targetPath field is set.
     */
    @java.lang.Override
    public boolean hasTargetPath() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     * <code>required string target_path = 4;</code>
     * @return The targetPath.
     */
    @java.lang.Override
    public java.lang.String getTargetPath() {
      java.lang.Object ref = targetPath_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          targetPath_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string target_path = 4;</code>
     * @return The bytes for targetPath.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetPathBytes() {
      java.lang.Object ref = targetPath_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        targetPath_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int VOLUME_CAPABILITY_FIELD_NUMBER = 5;
    private org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability volumeCapability_;
    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     * @return Whether the volumeCapability field is set.
     */
    @java.lang.Override
    public boolean hasVolumeCapability() {
      return ((bitField0_ & 0x00000008) != 0);
    }
    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     * @return The volumeCapability.
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability() {
      return volumeCapability_ == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
    }
    /**
     * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder() {
      return volumeCapability_ == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
    }

    public static final int READONLY_FIELD_NUMBER = 6;
    private boolean readonly_ = false;
    /**
     * <code>required bool readonly = 6;</code>
     * @return Whether the readonly field is set.
     */
    @java.lang.Override
    public boolean hasReadonly() {
      return ((bitField0_ & 0x00000010) != 0);
    }
    /**
     * <code>required bool readonly = 6;</code>
     * @return The readonly.
     */
    @java.lang.Override
    public boolean getReadonly() {
      return readonly_;
    }

    public static final int SECRETS_FIELD_NUMBER = 7;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> secrets_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getSecretsList() {
      return secrets_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getSecretsOrBuilderList() {
      return secrets_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    @java.lang.Override
    public int getSecretsCount() {
      return secrets_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index) {
      return secrets_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
        int index) {
      return secrets_.get(index);
    }

    public static final int VOLUME_CONTEXT_FIELD_NUMBER = 8;
    @SuppressWarnings("serial")
    private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> volumeContext_;
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    @java.lang.Override
    public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getVolumeContextList() {
      return volumeContext_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    @java.lang.Override
    public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
        getVolumeContextOrBuilderList() {
      return volumeContext_;
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    @java.lang.Override
    public int getVolumeContextCount() {
      return volumeContext_.size();
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index) {
      return volumeContext_.get(index);
    }
    /**
     * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
     */
    @java.lang.Override
    public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
        int index) {
      return volumeContext_.get(index);
    }

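    // isInitialized() is memoized: -1 means "not yet computed", 0 "missing required
    // fields", 1 "complete". The checks below correspond to this message's proto2
    // required fields plus the required fields of the nested VolumeCapability.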
    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasVolumeId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTargetPath()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasVolumeCapability()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasReadonly()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getVolumeCapability().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
      }
      for (int i = 0; i < publishContext_.size(); i++) {
        output.writeMessage(2, publishContext_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 3, stagingTargetPath_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 4, targetPath_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        output.writeMessage(5, getVolumeCapability());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        output.writeBool(6, readonly_);
      }
      for (int i = 0; i < secrets_.size(); i++) {
        output.writeMessage(7, secrets_.get(i));
      }
      for (int i = 0; i < volumeContext_.size(); i++) {
        output.writeMessage(8, volumeContext_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, volumeId_);
      }
      for (int i = 0; i < publishContext_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(2, publishContext_.get(i));
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(3, stagingTargetPath_);
      }
      if (((bitField0_ & 0x00000004) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(4, targetPath_);
      }
      if (((bitField0_ & 0x00000008) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(5, getVolumeCapability());
      }
      if (((bitField0_ & 0x00000010) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeBoolSize(6, readonly_);
      }
      for (int i = 0; i < secrets_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(7, secrets_.get(i));
      }
      for (int i = 0; i < volumeContext_.size(); i++) {
        size += org.apache.hadoop.thirdparty.protobuf.CodedOutputStream
          .computeMessageSize(8, volumeContext_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest) obj;

      if (hasVolumeId() != other.hasVolumeId()) return false;
      if (hasVolumeId()) {
        if (!getVolumeId()
            .equals(other.getVolumeId())) return false;
      }
      if (!getPublishContextList()
          .equals(other.getPublishContextList())) return false;
      if (hasStagingTargetPath() != other.hasStagingTargetPath()) return false;
      if (hasStagingTargetPath()) {
        if (!getStagingTargetPath()
            .equals(other.getStagingTargetPath())) return false;
      }
      if (hasTargetPath() != other.hasTargetPath()) return false;
      if (hasTargetPath()) {
        if (!getTargetPath()
            .equals(other.getTargetPath())) return false;
      }
      if (hasVolumeCapability() != other.hasVolumeCapability()) return false;
      if (hasVolumeCapability()) {
        if (!getVolumeCapability()
            .equals(other.getVolumeCapability())) return false;
      }
      if (hasReadonly() != other.hasReadonly()) return false;
      if (hasReadonly()) {
        if (getReadonly()
            != other.getReadonly()) return false;
      }
      if (!getSecretsList()
          .equals(other.getSecretsList())) return false;
      if (!getVolumeContextList()
          .equals(other.getVolumeContextList())) return false;
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasVolumeId()) {
        hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeId().hashCode();
      }
      if (getPublishContextCount() > 0) {
        hash = (37 * hash) + PUBLISH_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getPublishContextList().hashCode();
      }
      if (hasStagingTargetPath()) {
        hash = (37 * hash) + STAGING_TARGET_PATH_FIELD_NUMBER;
        hash = (53 * hash) + getStagingTargetPath().hashCode();
      }
      if (hasTargetPath()) {
        hash = (37 * hash) + TARGET_PATH_FIELD_NUMBER;
        hash = (53 * hash) + getTargetPath().hashCode();
      }
      if (hasVolumeCapability()) {
        hash = (37 * hash) + VOLUME_CAPABILITY_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeCapability().hashCode();
      }
      if (hasReadonly()) {
        hash = (37 * hash) + READONLY_FIELD_NUMBER;
        hash = (53 * hash) + org.apache.hadoop.thirdparty.protobuf.Internal.hashBoolean(
            getReadonly());
      }
      if (getSecretsCount() > 0) {
        hash = (37 * hash) + SECRETS_FIELD_NUMBER;
        hash = (53 * hash) + getSecretsList().hashCode();
      }
      if (getVolumeContextCount() > 0) {
        hash = (37 * hash) + VOLUME_CONTEXT_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeContextList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

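    // Round-trip sketch (illustrative): serialize with toByteArray() or writeTo(), then
    // re-parse with one of the overloads above, e.g.
    //   NodePublishVolumeRequest copy = NodePublishVolumeRequest.parseFrom(request.toByteArray());
    // parseFrom() throws InvalidProtocolBufferException if required fields are missing.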
    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodePublishVolumeRequest}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodePublishVolumeRequest)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequestOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
                .alwaysUseFieldBuilders) {
          getPublishContextFieldBuilder();
          getVolumeCapabilityFieldBuilder();
          getSecretsFieldBuilder();
          getVolumeContextFieldBuilder();
        }
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        volumeId_ = "";
        if (publishContextBuilder_ == null) {
          publishContext_ = java.util.Collections.emptyList();
        } else {
          publishContext_ = null;
          publishContextBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000002);
        stagingTargetPath_ = "";
        targetPath_ = "";
        volumeCapability_ = null;
        if (volumeCapabilityBuilder_ != null) {
          volumeCapabilityBuilder_.dispose();
          volumeCapabilityBuilder_ = null;
        }
        readonly_ = false;
        if (secretsBuilder_ == null) {
          secrets_ = java.util.Collections.emptyList();
        } else {
          secrets_ = null;
          secretsBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        if (volumeContextBuilder_ == null) {
          volumeContext_ = java.util.Collections.emptyList();
        } else {
          volumeContext_ = null;
          volumeContextBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000080);
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest(this);
        buildPartialRepeatedFields(result);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartialRepeatedFields(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result) {
        if (publishContextBuilder_ == null) {
          if (((bitField0_ & 0x00000002) != 0)) {
            publishContext_ = java.util.Collections.unmodifiableList(publishContext_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.publishContext_ = publishContext_;
        } else {
          result.publishContext_ = publishContextBuilder_.build();
        }
        if (secretsBuilder_ == null) {
          if (((bitField0_ & 0x00000040) != 0)) {
            secrets_ = java.util.Collections.unmodifiableList(secrets_);
            bitField0_ = (bitField0_ & ~0x00000040);
          }
          result.secrets_ = secrets_;
        } else {
          result.secrets_ = secretsBuilder_.build();
        }
        if (volumeContextBuilder_ == null) {
          if (((bitField0_ & 0x00000080) != 0)) {
            volumeContext_ = java.util.Collections.unmodifiableList(volumeContext_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.volumeContext_ = volumeContext_;
        } else {
          result.volumeContext_ = volumeContextBuilder_.build();
        }
      }

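      // buildPartial0() copies the singular fields and remaps the builder's presence
      // bits onto the message's layout: the builder also reserves bits for the repeated
      // fields (handled in buildPartialRepeatedFields above), so the bit positions in
      // from_bitField0_ and to_bitField0_ differ.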
      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.volumeId_ = volumeId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000004) != 0)) {
          result.stagingTargetPath_ = stagingTargetPath_;
          to_bitField0_ |= 0x00000002;
        }
        if (((from_bitField0_ & 0x00000008) != 0)) {
          result.targetPath_ = targetPath_;
          to_bitField0_ |= 0x00000004;
        }
        if (((from_bitField0_ & 0x00000010) != 0)) {
          result.volumeCapability_ = volumeCapabilityBuilder_ == null
              ? volumeCapability_
              : volumeCapabilityBuilder_.build();
          to_bitField0_ |= 0x00000008;
        }
        if (((from_bitField0_ & 0x00000020) != 0)) {
          result.readonly_ = readonly_;
          to_bitField0_ |= 0x00000010;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest.getDefaultInstance()) return this;
        if (other.hasVolumeId()) {
          volumeId_ = other.volumeId_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (publishContextBuilder_ == null) {
          if (!other.publishContext_.isEmpty()) {
            if (publishContext_.isEmpty()) {
              publishContext_ = other.publishContext_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensurePublishContextIsMutable();
              publishContext_.addAll(other.publishContext_);
            }
            onChanged();
          }
        } else {
          if (!other.publishContext_.isEmpty()) {
            if (publishContextBuilder_.isEmpty()) {
              publishContextBuilder_.dispose();
              publishContextBuilder_ = null;
              publishContext_ = other.publishContext_;
              bitField0_ = (bitField0_ & ~0x00000002);
              publishContextBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getPublishContextFieldBuilder() : null;
            } else {
              publishContextBuilder_.addAllMessages(other.publishContext_);
            }
          }
        }
        if (other.hasStagingTargetPath()) {
          stagingTargetPath_ = other.stagingTargetPath_;
          bitField0_ |= 0x00000004;
          onChanged();
        }
        if (other.hasTargetPath()) {
          targetPath_ = other.targetPath_;
          bitField0_ |= 0x00000008;
          onChanged();
        }
        if (other.hasVolumeCapability()) {
          mergeVolumeCapability(other.getVolumeCapability());
        }
        if (other.hasReadonly()) {
          setReadonly(other.getReadonly());
        }
        if (secretsBuilder_ == null) {
          if (!other.secrets_.isEmpty()) {
            if (secrets_.isEmpty()) {
              secrets_ = other.secrets_;
              bitField0_ = (bitField0_ & ~0x00000040);
            } else {
              ensureSecretsIsMutable();
              secrets_.addAll(other.secrets_);
            }
            onChanged();
          }
        } else {
          if (!other.secrets_.isEmpty()) {
            if (secretsBuilder_.isEmpty()) {
              secretsBuilder_.dispose();
              secretsBuilder_ = null;
              secrets_ = other.secrets_;
              bitField0_ = (bitField0_ & ~0x00000040);
              secretsBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getSecretsFieldBuilder() : null;
            } else {
              secretsBuilder_.addAllMessages(other.secrets_);
            }
          }
        }
        if (volumeContextBuilder_ == null) {
          if (!other.volumeContext_.isEmpty()) {
            if (volumeContext_.isEmpty()) {
              volumeContext_ = other.volumeContext_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureVolumeContextIsMutable();
              volumeContext_.addAll(other.volumeContext_);
            }
            onChanged();
          }
        } else {
          if (!other.volumeContext_.isEmpty()) {
            if (volumeContextBuilder_.isEmpty()) {
              volumeContextBuilder_.dispose();
              volumeContextBuilder_ = null;
              volumeContext_ = other.volumeContext_;
              bitField0_ = (bitField0_ & ~0x00000080);
              volumeContextBuilder_ = 
                org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                   getVolumeContextFieldBuilder() : null;
            } else {
              volumeContextBuilder_.addAllMessages(other.volumeContext_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasVolumeId()) {
          return false;
        }
        if (!hasTargetPath()) {
          return false;
        }
        if (!hasVolumeCapability()) {
          return false;
        }
        if (!hasReadonly()) {
          return false;
        }
        if (!getVolumeCapability().isInitialized()) {
          return false;
        }
        return true;
      }

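      // The parsing loop below switches on raw wire tags, where tag = (field_number << 3)
      // | wire_type: 10/18/26/34/42/58/66 are the length-delimited fields 1-5, 7 and 8,
      // and 48 is the varint-encoded bool field 6 (readonly).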
      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                volumeId_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (publishContextBuilder_ == null) {
                  ensurePublishContextIsMutable();
                  publishContext_.add(m);
                } else {
                  publishContextBuilder_.addMessage(m);
                }
                break;
              } // case 18
              case 26: {
                stagingTargetPath_ = input.readBytes();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
              case 34: {
                targetPath_ = input.readBytes();
                bitField0_ |= 0x00000008;
                break;
              } // case 34
              case 42: {
                input.readMessage(
                    getVolumeCapabilityFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000010;
                break;
              } // case 42
              case 48: {
                readonly_ = input.readBool();
                bitField0_ |= 0x00000020;
                break;
              } // case 48
              case 58: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (secretsBuilder_ == null) {
                  ensureSecretsIsMutable();
                  secrets_.add(m);
                } else {
                  secretsBuilder_.addMessage(m);
                }
                break;
              } // case 58
              case 66: {
                org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto m =
                    input.readMessage(
                        org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.PARSER,
                        extensionRegistry);
                if (volumeContextBuilder_ == null) {
                  ensureVolumeContextIsMutable();
                  volumeContext_.add(m);
                } else {
                  volumeContextBuilder_.addMessage(m);
                }
                break;
              } // case 66
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object volumeId_ = "";
      /**
       * <code>required string volume_id = 1;</code>
       * @return Whether the volumeId field is set.
       */
      public boolean hasVolumeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The volumeId.
       */
      public java.lang.String getVolumeId() {
        java.lang.Object ref = volumeId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            volumeId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The bytes for volumeId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getVolumeIdBytes() {
        java.lang.Object ref = volumeId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          volumeId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearVolumeId() {
        volumeId_ = getDefaultInstance().getVolumeId();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The bytes for volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> publishContext_ =
        java.util.Collections.emptyList();
      private void ensurePublishContextIsMutable() {
        if (!((bitField0_ & 0x00000002) != 0)) {
          publishContext_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(publishContext_);
          bitField0_ |= 0x00000002;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> publishContextBuilder_;
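      // Each repeated field keeps a dual representation: a plain list (publishContext_)
      // until a RepeatedFieldBuilderV3 is requested, after which the builder object owns
      // the data. All accessors below branch on whichever representation is active.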

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getPublishContextList() {
        if (publishContextBuilder_ == null) {
          return java.util.Collections.unmodifiableList(publishContext_);
        } else {
          return publishContextBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public int getPublishContextCount() {
        if (publishContextBuilder_ == null) {
          return publishContext_.size();
        } else {
          return publishContextBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getPublishContext(int index) {
        if (publishContextBuilder_ == null) {
          return publishContext_.get(index);
        } else {
          return publishContextBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder setPublishContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (publishContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePublishContextIsMutable();
          publishContext_.set(index, value);
          onChanged();
        } else {
          publishContextBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder setPublishContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (publishContextBuilder_ == null) {
          ensurePublishContextIsMutable();
          publishContext_.set(index, builderForValue.build());
          onChanged();
        } else {
          publishContextBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder addPublishContext(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (publishContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePublishContextIsMutable();
          publishContext_.add(value);
          onChanged();
        } else {
          publishContextBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder addPublishContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (publishContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensurePublishContextIsMutable();
          publishContext_.add(index, value);
          onChanged();
        } else {
          publishContextBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder addPublishContext(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (publishContextBuilder_ == null) {
          ensurePublishContextIsMutable();
          publishContext_.add(builderForValue.build());
          onChanged();
        } else {
          publishContextBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder addPublishContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (publishContextBuilder_ == null) {
          ensurePublishContextIsMutable();
          publishContext_.add(index, builderForValue.build());
          onChanged();
        } else {
          publishContextBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder addAllPublishContext(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (publishContextBuilder_ == null) {
          ensurePublishContextIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, publishContext_);
          onChanged();
        } else {
          publishContextBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder clearPublishContext() {
        if (publishContextBuilder_ == null) {
          publishContext_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          publishContextBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public Builder removePublishContext(int index) {
        if (publishContextBuilder_ == null) {
          ensurePublishContextIsMutable();
          publishContext_.remove(index);
          onChanged();
        } else {
          publishContextBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getPublishContextBuilder(
          int index) {
        return getPublishContextFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getPublishContextOrBuilder(
          int index) {
        if (publishContextBuilder_ == null) {
          return publishContext_.get(index);
        } else {
          return publishContextBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getPublishContextOrBuilderList() {
        if (publishContextBuilder_ != null) {
          return publishContextBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(publishContext_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addPublishContextBuilder() {
        return getPublishContextFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addPublishContextBuilder(
          int index) {
        return getPublishContextFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto publish_context = 2;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getPublishContextBuilderList() {
        return getPublishContextFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getPublishContextFieldBuilder() {
        if (publishContextBuilder_ == null) {
          publishContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  publishContext_,
                  ((bitField0_ & 0x00000002) != 0),
                  getParentForChildren(),
                  isClean());
          publishContext_ = null;
        }
        return publishContextBuilder_;
      }
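
      /*
       * Usage sketch (illustrative only; StringStringMapProto.newBuilder() and its
       * key/value setters are assumed from yarn_protos.proto): the repeated
       * publish_context field can be populated either with prebuilt messages or
       * through the lazily created per-element builders.
       *
       *   NodePublishVolumeRequest.Builder b = NodePublishVolumeRequest.newBuilder();
       *   // Option 1: add a prebuilt entry.
       *   b.addPublishContext(StringStringMapProto.newBuilder()
       *       .setKey("devicePath").setValue("/dev/sdb").build());
       *   // Option 2: mutate an entry in place via the repeated-field builder;
       *   // the change is picked up when b.build() is called.
       *   b.addPublishContextBuilder().setKey("readOnly").setValue("false");
       */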

      private java.lang.Object stagingTargetPath_ = "";
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @return Whether the stagingTargetPath field is set.
       */
      public boolean hasStagingTargetPath() {
        return ((bitField0_ & 0x00000004) != 0);
      }
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @return The stagingTargetPath.
       */
      public java.lang.String getStagingTargetPath() {
        java.lang.Object ref = stagingTargetPath_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            stagingTargetPath_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @return The bytes for stagingTargetPath.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getStagingTargetPathBytes() {
        java.lang.Object ref = stagingTargetPath_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          stagingTargetPath_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @param value The stagingTargetPath to set.
       * @return This builder for chaining.
       */
      public Builder setStagingTargetPath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        stagingTargetPath_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @return This builder for chaining.
       */
      public Builder clearStagingTargetPath() {
        stagingTargetPath_ = getDefaultInstance().getStagingTargetPath();
        bitField0_ = (bitField0_ & ~0x00000004);
        onChanged();
        return this;
      }
      /**
       * <code>optional string staging_target_path = 3;</code>
       * @param value The bytes for stagingTargetPath to set.
       * @return This builder for chaining.
       */
      public Builder setStagingTargetPathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        stagingTargetPath_ = value;
        bitField0_ |= 0x00000004;
        onChanged();
        return this;
      }

      private java.lang.Object targetPath_ = "";
      /**
       * <code>required string target_path = 4;</code>
       * @return Whether the targetPath field is set.
       */
      public boolean hasTargetPath() {
        return ((bitField0_ & 0x00000008) != 0);
      }
      /**
       * <code>required string target_path = 4;</code>
       * @return The targetPath.
       */
      public java.lang.String getTargetPath() {
        java.lang.Object ref = targetPath_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            targetPath_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string target_path = 4;</code>
       * @return The bytes for targetPath.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTargetPathBytes() {
        java.lang.Object ref = targetPath_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          targetPath_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string target_path = 4;</code>
       * @param value The targetPath to set.
       * @return This builder for chaining.
       */
      public Builder setTargetPath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        targetPath_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }
      /**
       * <code>required string target_path = 4;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetPath() {
        targetPath_ = getDefaultInstance().getTargetPath();
        bitField0_ = (bitField0_ & ~0x00000008);
        onChanged();
        return this;
      }
      /**
       * <code>required string target_path = 4;</code>
       * @param value The bytes for targetPath to set.
       * @return This builder for chaining.
       */
      public Builder setTargetPathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        targetPath_ = value;
        bitField0_ |= 0x00000008;
        onChanged();
        return this;
      }

      private org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability volumeCapability_;
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> volumeCapabilityBuilder_;
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       * @return Whether the volumeCapability field is set.
       */
      public boolean hasVolumeCapability() {
        return ((bitField0_ & 0x00000010) != 0);
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       * @return The volumeCapability.
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability getVolumeCapability() {
        if (volumeCapabilityBuilder_ == null) {
          return volumeCapability_ == null ? org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
        } else {
          return volumeCapabilityBuilder_.getMessage();
        }
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public Builder setVolumeCapability(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
        if (volumeCapabilityBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          volumeCapability_ = value;
        } else {
          volumeCapabilityBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public Builder setVolumeCapability(
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder builderForValue) {
        if (volumeCapabilityBuilder_ == null) {
          volumeCapability_ = builderForValue.build();
        } else {
          volumeCapabilityBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public Builder mergeVolumeCapability(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability value) {
        if (volumeCapabilityBuilder_ == null) {
          if (((bitField0_ & 0x00000010) != 0) &&
            volumeCapability_ != null &&
            volumeCapability_ != org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance()) {
            getVolumeCapabilityBuilder().mergeFrom(value);
          } else {
            volumeCapability_ = value;
          }
        } else {
          volumeCapabilityBuilder_.mergeFrom(value);
        }
        if (volumeCapability_ != null) {
          bitField0_ |= 0x00000010;
          onChanged();
        }
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public Builder clearVolumeCapability() {
        bitField0_ = (bitField0_ & ~0x00000010);
        volumeCapability_ = null;
        if (volumeCapabilityBuilder_ != null) {
          volumeCapabilityBuilder_.dispose();
          volumeCapabilityBuilder_ = null;
        }
        onChanged();
        return this;
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder getVolumeCapabilityBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getVolumeCapabilityFieldBuilder().getBuilder();
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder getVolumeCapabilityOrBuilder() {
        if (volumeCapabilityBuilder_ != null) {
          return volumeCapabilityBuilder_.getMessageOrBuilder();
        } else {
          return volumeCapability_ == null ?
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.getDefaultInstance() : volumeCapability_;
        }
      }
      /**
       * <code>required .hadoop.yarn.VolumeCapability volume_capability = 5;</code>
       */
      private org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
          org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder> 
          getVolumeCapabilityFieldBuilder() {
        if (volumeCapabilityBuilder_ == null) {
          volumeCapabilityBuilder_ = new org.apache.hadoop.thirdparty.protobuf.SingleFieldBuilderV3<
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapability.Builder, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.VolumeCapabilityOrBuilder>(
                  getVolumeCapability(),
                  getParentForChildren(),
                  isClean());
          volumeCapability_ = null;
        }
        return volumeCapabilityBuilder_;
      }
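
      /*
       * Usage sketch (illustrative only): for the singular volume_capability
       * message, getVolumeCapabilityBuilder() lazily switches the field into
       * builder-backed mode, so nested mutations propagate to this builder
       * without an explicit setVolumeCapability(...) call.
       *
       *   NodePublishVolumeRequest.Builder b = NodePublishVolumeRequest.newBuilder();
       *   VolumeCapability.Builder cap = b.getVolumeCapabilityBuilder();
       *   // ... populate cap; the result is reflected when b.build() is called.
       */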

      private boolean readonly_;
      /**
       * <code>required bool readonly = 6;</code>
       * @return Whether the readonly field is set.
       */
      @java.lang.Override
      public boolean hasReadonly() {
        return ((bitField0_ & 0x00000020) != 0);
      }
      /**
       * <code>required bool readonly = 6;</code>
       * @return The readonly.
       */
      @java.lang.Override
      public boolean getReadonly() {
        return readonly_;
      }
      /**
       * <code>required bool readonly = 6;</code>
       * @param value The readonly to set.
       * @return This builder for chaining.
       */
      public Builder setReadonly(boolean value) {
        readonly_ = value;
        bitField0_ |= 0x00000020;
        onChanged();
        return this;
      }
      /**
       * <code>required bool readonly = 6;</code>
       * @return This builder for chaining.
       */
      public Builder clearReadonly() {
        bitField0_ = (bitField0_ & ~0x00000020);
        readonly_ = false;
        onChanged();
        return this;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> secrets_ =
        java.util.Collections.emptyList();
      private void ensureSecretsIsMutable() {
        if (!((bitField0_ & 0x00000040) != 0)) {
          secrets_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(secrets_);
          bitField0_ |= 0x00000040;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> secretsBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getSecretsList() {
        if (secretsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(secrets_);
        } else {
          return secretsBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public int getSecretsCount() {
        if (secretsBuilder_ == null) {
          return secrets_.size();
        } else {
          return secretsBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getSecrets(int index) {
        if (secretsBuilder_ == null) {
          return secrets_.get(index);
        } else {
          return secretsBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder setSecrets(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (secretsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSecretsIsMutable();
          secrets_.set(index, value);
          onChanged();
        } else {
          secretsBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder setSecrets(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (secretsBuilder_ == null) {
          ensureSecretsIsMutable();
          secrets_.set(index, builderForValue.build());
          onChanged();
        } else {
          secretsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder addSecrets(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (secretsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSecretsIsMutable();
          secrets_.add(value);
          onChanged();
        } else {
          secretsBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder addSecrets(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (secretsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureSecretsIsMutable();
          secrets_.add(index, value);
          onChanged();
        } else {
          secretsBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder addSecrets(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (secretsBuilder_ == null) {
          ensureSecretsIsMutable();
          secrets_.add(builderForValue.build());
          onChanged();
        } else {
          secretsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder addSecrets(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (secretsBuilder_ == null) {
          ensureSecretsIsMutable();
          secrets_.add(index, builderForValue.build());
          onChanged();
        } else {
          secretsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder addAllSecrets(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (secretsBuilder_ == null) {
          ensureSecretsIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, secrets_);
          onChanged();
        } else {
          secretsBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder clearSecrets() {
        if (secretsBuilder_ == null) {
          secrets_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000040);
          onChanged();
        } else {
          secretsBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public Builder removeSecrets(int index) {
        if (secretsBuilder_ == null) {
          ensureSecretsIsMutable();
          secrets_.remove(index);
          onChanged();
        } else {
          secretsBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getSecretsBuilder(
          int index) {
        return getSecretsFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getSecretsOrBuilder(
          int index) {
        if (secretsBuilder_ == null) {
          return secrets_.get(index);
        } else {
          return secretsBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getSecretsOrBuilderList() {
        if (secretsBuilder_ != null) {
          return secretsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(secrets_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addSecretsBuilder() {
        return getSecretsFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addSecretsBuilder(
          int index) {
        return getSecretsFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto secrets = 7;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getSecretsBuilderList() {
        return getSecretsFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getSecretsFieldBuilder() {
        if (secretsBuilder_ == null) {
          secretsBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  secrets_,
                  ((bitField0_ & 0x00000040) != 0),
                  getParentForChildren(),
                  isClean());
          secrets_ = null;
        }
        return secretsBuilder_;
      }

      private java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> volumeContext_ =
        java.util.Collections.emptyList();
      private void ensureVolumeContextIsMutable() {
        if (!((bitField0_ & 0x00000080) != 0)) {
          volumeContext_ = new java.util.ArrayList<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto>(volumeContext_);
          bitField0_ |= 0x00000080;
        }
      }

      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> volumeContextBuilder_;

      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> getVolumeContextList() {
        if (volumeContextBuilder_ == null) {
          return java.util.Collections.unmodifiableList(volumeContext_);
        } else {
          return volumeContextBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public int getVolumeContextCount() {
        if (volumeContextBuilder_ == null) {
          return volumeContext_.size();
        } else {
          return volumeContextBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto getVolumeContext(int index) {
        if (volumeContextBuilder_ == null) {
          return volumeContext_.get(index);
        } else {
          return volumeContextBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder setVolumeContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeContextIsMutable();
          volumeContext_.set(index, value);
          onChanged();
        } else {
          volumeContextBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder setVolumeContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeContextBuilder_ == null) {
          ensureVolumeContextIsMutable();
          volumeContext_.set(index, builderForValue.build());
          onChanged();
        } else {
          volumeContextBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder addVolumeContext(org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeContextIsMutable();
          volumeContext_.add(value);
          onChanged();
        } else {
          volumeContextBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder addVolumeContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto value) {
        if (volumeContextBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureVolumeContextIsMutable();
          volumeContext_.add(index, value);
          onChanged();
        } else {
          volumeContextBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder addVolumeContext(
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeContextBuilder_ == null) {
          ensureVolumeContextIsMutable();
          volumeContext_.add(builderForValue.build());
          onChanged();
        } else {
          volumeContextBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder addVolumeContext(
          int index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder builderForValue) {
        if (volumeContextBuilder_ == null) {
          ensureVolumeContextIsMutable();
          volumeContext_.add(index, builderForValue.build());
          onChanged();
        } else {
          volumeContextBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder addAllVolumeContext(
          java.lang.Iterable<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto> values) {
        if (volumeContextBuilder_ == null) {
          ensureVolumeContextIsMutable();
          org.apache.hadoop.thirdparty.protobuf.AbstractMessageLite.Builder.addAll(
              values, volumeContext_);
          onChanged();
        } else {
          volumeContextBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder clearVolumeContext() {
        if (volumeContextBuilder_ == null) {
          volumeContext_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
          onChanged();
        } else {
          volumeContextBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public Builder removeVolumeContext(int index) {
        if (volumeContextBuilder_ == null) {
          ensureVolumeContextIsMutable();
          volumeContext_.remove(index);
          onChanged();
        } else {
          volumeContextBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder getVolumeContextBuilder(
          int index) {
        return getVolumeContextFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder getVolumeContextOrBuilder(
          int index) {
        if (volumeContextBuilder_ == null) {
          return volumeContext_.get(index);
        } else {
          return volumeContextBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public java.util.List<? extends org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
           getVolumeContextOrBuilderList() {
        if (volumeContextBuilder_ != null) {
          return volumeContextBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(volumeContext_);
        }
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeContextBuilder() {
        return getVolumeContextFieldBuilder().addBuilder(
            org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder addVolumeContextBuilder(
          int index) {
        return getVolumeContextFieldBuilder().addBuilder(
            index, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.getDefaultInstance());
      }
      /**
       * <code>repeated .hadoop.yarn.StringStringMapProto volume_context = 8;</code>
       */
      public java.util.List<org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder> 
           getVolumeContextBuilderList() {
        return getVolumeContextFieldBuilder().getBuilderList();
      }
      private org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
          org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder> 
          getVolumeContextFieldBuilder() {
        if (volumeContextBuilder_ == null) {
          volumeContextBuilder_ = new org.apache.hadoop.thirdparty.protobuf.RepeatedFieldBuilderV3<
              org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProto.Builder, org.apache.hadoop.yarn.proto.YarnProtos.StringStringMapProtoOrBuilder>(
                  volumeContext_,
                  ((bitField0_ & 0x00000080) != 0),
                  getParentForChildren(),
                  isClean());
          volumeContext_ = null;
        }
        return volumeContextBuilder_;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodePublishVolumeRequest)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodePublishVolumeRequest)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodePublishVolumeRequest>() {
      @java.lang.Override
      public NodePublishVolumeRequest parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeRequest> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
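
  /*
   * Usage sketch (illustrative only; the field values are placeholders and the
   * newBuilder()/buildPartial() entry points are the standard generated ones):
   * a NodePublishVolumeRequest is assembled through its builder. volume_id,
   * target_path, volume_capability and readonly are required fields, so build()
   * throws an uninitialized-message exception until all of them are set;
   * buildPartial() skips that check.
   *
   *   NodePublishVolumeRequest req = NodePublishVolumeRequest.newBuilder()
   *       .setVolumeId("vol-0001")
   *       .setTargetPath("/var/lib/yarn/csi/target")
   *       .setReadonly(false)
   *       .setVolumeCapability(capability)   // a prebuilt VolumeCapability
   *       .buildPartial();
   */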

  public interface NodePublishVolumeResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodePublishVolumeResponse)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * <pre>
   * Intentionally empty.
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.NodePublishVolumeResponse}
   */
  public static final class NodePublishVolumeResponse extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodePublishVolumeResponse)
      NodePublishVolumeResponseOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodePublishVolumeResponse.newBuilder() to construct.
    private NodePublishVolumeResponse(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodePublishVolumeResponse() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodePublishVolumeResponse();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Intentionally empty.
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.NodePublishVolumeResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodePublishVolumeResponse)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponseOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.newBuilder()
      private Builder() {
      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);
      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodePublishVolumeResponse)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodePublishVolumeResponse)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodePublishVolumeResponse>() {
      @java.lang.Override
      public NodePublishVolumeResponse parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodePublishVolumeResponse> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodePublishVolumeResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }

  public interface NodeUnpublishVolumeRequestOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeUnpublishVolumeRequest)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {

    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    boolean hasVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    java.lang.String getVolumeId();
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes();

    /**
     * <code>required string target_path = 2;</code>
     * @return Whether the targetPath field is set.
     */
    boolean hasTargetPath();
    /**
     * <code>required string target_path = 2;</code>
     * @return The targetPath.
     */
    java.lang.String getTargetPath();
    /**
     * <code>required string target_path = 2;</code>
     * @return The bytes for targetPath.
     */
    org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetPathBytes();
  }
  /**
   * Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeRequest}
   */
  public static final class NodeUnpublishVolumeRequest extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeUnpublishVolumeRequest)
      NodeUnpublishVolumeRequestOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeUnpublishVolumeRequest.newBuilder() to construct.
    private NodeUnpublishVolumeRequest(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeUnpublishVolumeRequest() {
      volumeId_ = "";
      targetPath_ = "";
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeUnpublishVolumeRequest();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.Builder.class);
    }

    private int bitField0_;
    public static final int VOLUME_ID_FIELD_NUMBER = 1;
    @SuppressWarnings("serial")
    private volatile java.lang.Object volumeId_ = "";
    /**
     * <code>required string volume_id = 1;</code>
     * @return Whether the volumeId field is set.
     */
    @java.lang.Override
    public boolean hasVolumeId() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The volumeId.
     */
    @java.lang.Override
    public java.lang.String getVolumeId() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          volumeId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string volume_id = 1;</code>
     * @return The bytes for volumeId.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getVolumeIdBytes() {
      java.lang.Object ref = volumeId_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        volumeId_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    public static final int TARGET_PATH_FIELD_NUMBER = 2;
    @SuppressWarnings("serial")
    private volatile java.lang.Object targetPath_ = "";
    /**
     * <code>required string target_path = 2;</code>
     * @return Whether the targetPath field is set.
     */
    @java.lang.Override
    public boolean hasTargetPath() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     * <code>required string target_path = 2;</code>
     * @return The targetPath.
     */
    @java.lang.Override
    public java.lang.String getTargetPath() {
      java.lang.Object ref = targetPath_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        org.apache.hadoop.thirdparty.protobuf.ByteString bs = 
            (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          targetPath_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string target_path = 2;</code>
     * @return The bytes for targetPath.
     */
    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.ByteString
        getTargetPathBytes() {
      java.lang.Object ref = targetPath_;
      if (ref instanceof java.lang.String) {
        org.apache.hadoop.thirdparty.protobuf.ByteString b = 
            org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        targetPath_ = b;
        return b;
      } else {
        return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
      }
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      if (!hasVolumeId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasTargetPath()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      if (((bitField0_ & 0x00000001) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 1, volumeId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.writeString(output, 2, targetPath_);
      }
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(1, volumeId_);
      }
      if (((bitField0_ & 0x00000002) != 0)) {
        size += org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.computeStringSize(2, targetPath_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest) obj;

      if (hasVolumeId() != other.hasVolumeId()) return false;
      if (hasVolumeId()) {
        if (!getVolumeId()
            .equals(other.getVolumeId())) return false;
      }
      if (hasTargetPath() != other.hasTargetPath()) return false;
      if (hasTargetPath()) {
        if (!getTargetPath()
            .equals(other.getTargetPath())) return false;
      }
      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      if (hasVolumeId()) {
        hash = (37 * hash) + VOLUME_ID_FIELD_NUMBER;
        hash = (53 * hash) + getVolumeId().hashCode();
      }
      if (hasTargetPath()) {
        hash = (37 * hash) + TARGET_PATH_FIELD_NUMBER;
        hash = (53 * hash) + getTargetPath().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeRequest}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeUnpublishVolumeRequest)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequestOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        bitField0_ = 0;
        volumeId_ = "";
        targetPath_ = "";
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest(this);
        if (bitField0_ != 0) { buildPartial0(result); }
        onBuilt();
        return result;
      }

      private void buildPartial0(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest result) {
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) != 0)) {
          result.volumeId_ = volumeId_;
          to_bitField0_ |= 0x00000001;
        }
        if (((from_bitField0_ & 0x00000002) != 0)) {
          result.targetPath_ = targetPath_;
          to_bitField0_ |= 0x00000002;
        }
        result.bitField0_ |= to_bitField0_;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest.getDefaultInstance()) return this;
        if (other.hasVolumeId()) {
          volumeId_ = other.volumeId_;
          bitField0_ |= 0x00000001;
          onChanged();
        }
        if (other.hasTargetPath()) {
          targetPath_ = other.targetPath_;
          bitField0_ |= 0x00000002;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        if (!hasVolumeId()) {
          return false;
        }
        if (!hasTargetPath()) {
          return false;
        }
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              case 10: {
                volumeId_ = input.readBytes();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
              case 18: {
                targetPath_ = input.readBytes();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      private int bitField0_;

      private java.lang.Object volumeId_ = "";
      /**
       * <code>required string volume_id = 1;</code>
       * @return Whether the volumeId field is set.
       */
      public boolean hasVolumeId() {
        return ((bitField0_ & 0x00000001) != 0);
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The volumeId.
       */
      public java.lang.String getVolumeId() {
        java.lang.Object ref = volumeId_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            volumeId_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return The bytes for volumeId.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getVolumeIdBytes() {
        java.lang.Object ref = volumeId_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          volumeId_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeId(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @return This builder for chaining.
       */
      public Builder clearVolumeId() {
        volumeId_ = getDefaultInstance().getVolumeId();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>required string volume_id = 1;</code>
       * @param value The bytes for volumeId to set.
       * @return This builder for chaining.
       */
      public Builder setVolumeIdBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        volumeId_ = value;
        bitField0_ |= 0x00000001;
        onChanged();
        return this;
      }

      private java.lang.Object targetPath_ = "";
      /**
       * <code>required string target_path = 2;</code>
       * @return Whether the targetPath field is set.
       */
      public boolean hasTargetPath() {
        return ((bitField0_ & 0x00000002) != 0);
      }
      /**
       * <code>required string target_path = 2;</code>
       * @return The targetPath.
       */
      public java.lang.String getTargetPath() {
        java.lang.Object ref = targetPath_;
        if (!(ref instanceof java.lang.String)) {
          org.apache.hadoop.thirdparty.protobuf.ByteString bs =
              (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
          java.lang.String s = bs.toStringUtf8();
          if (bs.isValidUtf8()) {
            targetPath_ = s;
          }
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string target_path = 2;</code>
       * @return The bytes for targetPath.
       */
      public org.apache.hadoop.thirdparty.protobuf.ByteString
          getTargetPathBytes() {
        java.lang.Object ref = targetPath_;
        if (ref instanceof String) {
          org.apache.hadoop.thirdparty.protobuf.ByteString b = 
              org.apache.hadoop.thirdparty.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          targetPath_ = b;
          return b;
        } else {
          return (org.apache.hadoop.thirdparty.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string target_path = 2;</code>
       * @param value The targetPath to set.
       * @return This builder for chaining.
       */
      public Builder setTargetPath(
          java.lang.String value) {
        if (value == null) { throw new NullPointerException(); }
        targetPath_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      /**
       * <code>required string target_path = 2;</code>
       * @return This builder for chaining.
       */
      public Builder clearTargetPath() {
        targetPath_ = getDefaultInstance().getTargetPath();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }
      /**
       * <code>required string target_path = 2;</code>
       * @param value The bytes for targetPath to set.
       * @return This builder for chaining.
       */
      public Builder setTargetPathBytes(
          org.apache.hadoop.thirdparty.protobuf.ByteString value) {
        if (value == null) { throw new NullPointerException(); }
        targetPath_ = value;
        bitField0_ |= 0x00000002;
        onChanged();
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeUnpublishVolumeRequest)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeUnpublishVolumeRequest)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeRequest>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeUnpublishVolumeRequest>() {
      @java.lang.Override
      public NodeUnpublishVolumeRequest parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeRequest> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeRequest> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeRequest getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
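  /*
   * Illustrative usage sketch (not part of the protoc output; literal values are
   * placeholders): constructing a NodeUnpublishVolumeRequest with the builder API
   * defined above. Both volume_id and target_path are required fields, so build()
   * throws an UninitializedMessageException if either is missing.
   *
   *   CsiAdaptorProtos.NodeUnpublishVolumeRequest request =
   *       CsiAdaptorProtos.NodeUnpublishVolumeRequest.newBuilder()
   *           .setVolumeId("test-volume-0001")            // placeholder volume id
   *           .setTargetPath("/var/lib/yarn/csi/target")  // placeholder mount path
   *           .build();
   *   byte[] wire = request.toByteArray();                // wire-format bytes for the RPC
   */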

  public interface NodeUnpublishVolumeResponseOrBuilder extends
      // @@protoc_insertion_point(interface_extends:hadoop.yarn.NodeUnpublishVolumeResponse)
      org.apache.hadoop.thirdparty.protobuf.MessageOrBuilder {
  }
  /**
   * <pre>
   * Intentionally empty.
   * </pre>
   *
   * Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeResponse}
   */
  public static final class NodeUnpublishVolumeResponse extends
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3 implements
      // @@protoc_insertion_point(message_implements:hadoop.yarn.NodeUnpublishVolumeResponse)
      NodeUnpublishVolumeResponseOrBuilder {
    private static final long serialVersionUID = 0L;
    // Use NodeUnpublishVolumeResponse.newBuilder() to construct.
    private NodeUnpublishVolumeResponse(org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<?> builder) {
      super(builder);
    }
    private NodeUnpublishVolumeResponse() {
    }

    @java.lang.Override
    @SuppressWarnings({"unused"})
    protected java.lang.Object newInstance(
        UnusedPrivateParameter unused) {
      return new NodeUnpublishVolumeResponse();
    }

    public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
    }

    @java.lang.Override
    protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.Builder.class);
    }

    private byte memoizedIsInitialized = -1;
    @java.lang.Override
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;

      memoizedIsInitialized = 1;
      return true;
    }

    @java.lang.Override
    public void writeTo(org.apache.hadoop.thirdparty.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getUnknownFields().writeTo(output);
    }

    @java.lang.Override
    public int getSerializedSize() {
      int size = memoizedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSize = size;
      return size;
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse other = (org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) obj;

      if (!getUnknownFields().equals(other.getUnknownFields())) return false;
      return true;
    }

    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptor().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        java.nio.ByteBuffer data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        java.nio.ByteBuffer data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.ByteString data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(byte[] data)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        byte[] data,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input);
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseDelimitedFrom(
        java.io.InputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input);
    }
    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse parseFrom(
        org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
        org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3
          .parseWithIOException(PARSER, input, extensionRegistry);
    }

    @java.lang.Override
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder() {
      return DEFAULT_INSTANCE.toBuilder();
    }
    public static Builder newBuilder(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse prototype) {
      return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
    }
    @java.lang.Override
    public Builder toBuilder() {
      return this == DEFAULT_INSTANCE
          ? new Builder() : new Builder().mergeFrom(this);
    }

    @java.lang.Override
    protected Builder newBuilderForType(
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * <pre>
     * Intentionally empty.
     * </pre>
     *
     * Protobuf type {@code hadoop.yarn.NodeUnpublishVolumeResponse}
     */
    public static final class Builder extends
        org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.Builder<Builder> implements
        // @@protoc_insertion_point(builder_implements:hadoop.yarn.NodeUnpublishVolumeResponse)
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponseOrBuilder {
      public static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
      }

      @java.lang.Override
      protected org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.class, org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.newBuilder()
      private Builder() {

      }

      private Builder(
          org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.BuilderParent parent) {
        super(parent);

      }
      @java.lang.Override
      public Builder clear() {
        super.clear();
        return this;
      }

      @java.lang.Override
      public org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstanceForType() {
        return org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance();
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse build() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      @java.lang.Override
      public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse buildPartial() {
        org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse result = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse(this);
        onBuilt();
        return result;
      }

      @java.lang.Override
      public Builder clone() {
        return super.clone();
      }
      @java.lang.Override
      public Builder setField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.setField(field, value);
      }
      @java.lang.Override
      public Builder clearField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field) {
        return super.clearField(field);
      }
      @java.lang.Override
      public Builder clearOneof(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.OneofDescriptor oneof) {
        return super.clearOneof(oneof);
      }
      @java.lang.Override
      public Builder setRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          int index, java.lang.Object value) {
        return super.setRepeatedField(field, index, value);
      }
      @java.lang.Override
      public Builder addRepeatedField(
          org.apache.hadoop.thirdparty.protobuf.Descriptors.FieldDescriptor field,
          java.lang.Object value) {
        return super.addRepeatedField(field, value);
      }
      @java.lang.Override
      public Builder mergeFrom(org.apache.hadoop.thirdparty.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse) {
          return mergeFrom((org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse other) {
        if (other == org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        onChanged();
        return this;
      }

      @java.lang.Override
      public final boolean isInitialized() {
        return true;
      }

      @java.lang.Override
      public Builder mergeFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        if (extensionRegistry == null) {
          throw new java.lang.NullPointerException();
        }
        try {
          boolean done = false;
          while (!done) {
            int tag = input.readTag();
            switch (tag) {
              case 0:
                done = true;
                break;
              default: {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
            } // switch (tag)
          } // while (!done)
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.unwrapIOException();
        } finally {
          onChanged();
        } // finally
        return this;
      }
      @java.lang.Override
      public final Builder setUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.setUnknownFields(unknownFields);
      }

      @java.lang.Override
      public final Builder mergeUnknownFields(
          final org.apache.hadoop.thirdparty.protobuf.UnknownFieldSet unknownFields) {
        return super.mergeUnknownFields(unknownFields);
      }


      // @@protoc_insertion_point(builder_scope:hadoop.yarn.NodeUnpublishVolumeResponse)
    }

    // @@protoc_insertion_point(class_scope:hadoop.yarn.NodeUnpublishVolumeResponse)
    private static final org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse DEFAULT_INSTANCE;
    static {
      DEFAULT_INSTANCE = new org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse();
    }

    public static org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstance() {
      return DEFAULT_INSTANCE;
    }

    @java.lang.Deprecated public static final org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse>
        PARSER = new org.apache.hadoop.thirdparty.protobuf.AbstractParser<NodeUnpublishVolumeResponse>() {
      @java.lang.Override
      public NodeUnpublishVolumeResponse parsePartialFrom(
          org.apache.hadoop.thirdparty.protobuf.CodedInputStream input,
          org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite extensionRegistry)
          throws org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (org.apache.hadoop.thirdparty.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new org.apache.hadoop.thirdparty.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

    public static org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse> parser() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.thirdparty.protobuf.Parser<NodeUnpublishVolumeResponse> getParserForType() {
      return PARSER;
    }

    @java.lang.Override
    public org.apache.hadoop.yarn.proto.CsiAdaptorProtos.NodeUnpublishVolumeResponse getDefaultInstanceForType() {
      return DEFAULT_INSTANCE;
    }

  }
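  /*
   * Illustrative round-trip sketch (placeholder values, not part of the protoc
   * output): NodeUnpublishVolumeResponse declares no fields, so serializing the
   * default instance yields an empty byte array and parsing it back produces an
   * instance equal to the default.
   *
   *   byte[] wire =
   *       CsiAdaptorProtos.NodeUnpublishVolumeResponse.getDefaultInstance().toByteArray();
   *   CsiAdaptorProtos.NodeUnpublishVolumeResponse parsed =
   *       CsiAdaptorProtos.NodeUnpublishVolumeResponse.parseFrom(wire);  // wire.length == 0
   */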

  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_VolumeCapability_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable;
  private static final org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor
    internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor;
  private static final 
    org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable;

  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\026yarn_csi_adaptor.proto\022\013hadoop.yarn\032\021y" +
      "arn_protos.proto\"\260\001\n!ValidateVolumeCapab" +
      "ilitiesRequest\022\021\n\tvolume_id\030\001 \002(\t\022:\n\023vol" +
      "ume_capabilities\030\002 \003(\0132\035.hadoop.yarn.Vol" +
      "umeCapability\022<\n\021volume_attributes\030\003 \003(\013" +
      "2!.hadoop.yarn.StringStringMapProto\"H\n\"V" +
      "alidateVolumeCapabilitiesResponse\022\021\n\tsup" +
      "ported\030\001 \002(\010\022\017\n\007message\030\002 \001(\t\"\367\002\n\020Volume" +
      "Capability\022=\n\013volume_type\030\001 \002(\0162(.hadoop" +
      ".yarn.VolumeCapability.VolumeType\022=\n\013acc" +
      "ess_mode\030\002 \002(\0162(.hadoop.yarn.VolumeCapab" +
      "ility.AccessMode\022\023\n\013mount_flags\030\003 \003(\t\"(\n" +
      "\nVolumeType\022\t\n\005BLOCK\020\000\022\017\n\013FILE_SYSTEM\020\001\"" +
      "\245\001\n\nAccessMode\022\013\n\007UNKNOWN\020\000\022\026\n\022SINGLE_NO" +
      "DE_WRITER\020\001\022\033\n\027SINGLE_NODE_READER_ONLY\020\002" +
      "\022\032\n\026MULTI_NODE_READER_ONLY\020\003\022\034\n\030MULTI_NO" +
      "DE_SINGLE_WRITER\020\004\022\033\n\027MULTI_NODE_MULTI_W" +
      "RITER\020\005\"\026\n\024GetPluginInfoRequest\"=\n\025GetPl" +
      "uginInfoResponse\022\014\n\004name\030\001 \002(\t\022\026\n\016vendor" +
      "_version\030\002 \002(\t\"\326\002\n\030NodePublishVolumeRequ" +
      "est\022\021\n\tvolume_id\030\001 \002(\t\022:\n\017publish_contex" +
      "t\030\002 \003(\0132!.hadoop.yarn.StringStringMapPro" +
      "to\022\033\n\023staging_target_path\030\003 \001(\t\022\023\n\013targe" +
      "t_path\030\004 \002(\t\0228\n\021volume_capability\030\005 \002(\0132" +
      "\035.hadoop.yarn.VolumeCapability\022\020\n\010readon" +
      "ly\030\006 \002(\010\0222\n\007secrets\030\007 \003(\0132!.hadoop.yarn." +
      "StringStringMapProto\0229\n\016volume_context\030\010" +
      " \003(\0132!.hadoop.yarn.StringStringMapProto\"" +
      "\033\n\031NodePublishVolumeResponse\"D\n\032NodeUnpu" +
      "blishVolumeRequest\022\021\n\tvolume_id\030\001 \002(\t\022\023\n" +
      "\013target_path\030\002 \002(\t\"\035\n\033NodeUnpublishVolum" +
      "eResponseB3\n\034org.apache.hadoop.yarn.prot" +
      "oB\020CsiAdaptorProtos\240\001\001"
    };
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor(),
        });
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ValidateVolumeCapabilitiesRequest_descriptor,
        new java.lang.String[] { "VolumeId", "VolumeCapabilities", "VolumeAttributes", });
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor =
      getDescriptor().getMessageTypes().get(1);
    internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_ValidateVolumeCapabilitiesResponse_descriptor,
        new java.lang.String[] { "Supported", "Message", });
    internal_static_hadoop_yarn_VolumeCapability_descriptor =
      getDescriptor().getMessageTypes().get(2);
    internal_static_hadoop_yarn_VolumeCapability_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_VolumeCapability_descriptor,
        new java.lang.String[] { "VolumeType", "AccessMode", "MountFlags", });
    internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor =
      getDescriptor().getMessageTypes().get(3);
    internal_static_hadoop_yarn_GetPluginInfoRequest_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetPluginInfoRequest_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor =
      getDescriptor().getMessageTypes().get(4);
    internal_static_hadoop_yarn_GetPluginInfoResponse_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_GetPluginInfoResponse_descriptor,
        new java.lang.String[] { "Name", "VendorVersion", });
    internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor =
      getDescriptor().getMessageTypes().get(5);
    internal_static_hadoop_yarn_NodePublishVolumeRequest_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodePublishVolumeRequest_descriptor,
        new java.lang.String[] { "VolumeId", "PublishContext", "StagingTargetPath", "TargetPath", "VolumeCapability", "Readonly", "Secrets", "VolumeContext", });
    internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor =
      getDescriptor().getMessageTypes().get(6);
    internal_static_hadoop_yarn_NodePublishVolumeResponse_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodePublishVolumeResponse_descriptor,
        new java.lang.String[] { });
    internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor =
      getDescriptor().getMessageTypes().get(7);
    internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeUnpublishVolumeRequest_descriptor,
        new java.lang.String[] { "VolumeId", "TargetPath", });
    internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor =
      getDescriptor().getMessageTypes().get(8);
    internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_fieldAccessorTable = new
      org.apache.hadoop.thirdparty.protobuf.GeneratedMessageV3.FieldAccessorTable(
        internal_static_hadoop_yarn_NodeUnpublishVolumeResponse_descriptor,
        new java.lang.String[] { });
    org.apache.hadoop.yarn.proto.YarnProtos.getDescriptor();
  }
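  /*
   * Illustrative sketch (not part of the protoc output): the file descriptor built
   * from the embedded descriptorData above can be introspected at runtime, for
   * example to enumerate the message types declared in yarn_csi_adaptor.proto.
   *
   *   for (org.apache.hadoop.thirdparty.protobuf.Descriptors.Descriptor d :
   *       CsiAdaptorProtos.getDescriptor().getMessageTypes()) {
   *     System.out.println(d.getFullName());  // e.g. hadoop.yarn.NodeUnpublishVolumeRequest
   *   }
   */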

  // @@protoc_insertion_point(outer_class_scope)
}