// MRClientProtocol.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: MRClientProtocol.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.yarn.proto;

public final class MRClientProtocol {
  private MRClientProtocol() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * <pre>
   * If making changes to this, please edit HSClientProtocolService 
   * </pre>
   *
   * Protobuf service {@code hadoop.mapreduce.MRClientProtocolService}
   */
  public static abstract class MRClientProtocolService
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected MRClientProtocolService() {}

    public interface Interface {
      /**
       * <code>rpc getJobReport(.hadoop.mapreduce.GetJobReportRequestProto) returns (.hadoop.mapreduce.GetJobReportResponseProto);</code>
       */
      public abstract void getJobReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto> done);

      /**
       * <code>rpc getTaskReport(.hadoop.mapreduce.GetTaskReportRequestProto) returns (.hadoop.mapreduce.GetTaskReportResponseProto);</code>
       */
      public abstract void getTaskReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto> done);

      /**
       * <code>rpc getTaskAttemptReport(.hadoop.mapreduce.GetTaskAttemptReportRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptReportResponseProto);</code>
       */
      public abstract void getTaskAttemptReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto> done);

      /**
       * <code>rpc getCounters(.hadoop.mapreduce.GetCountersRequestProto) returns (.hadoop.mapreduce.GetCountersResponseProto);</code>
       */
      public abstract void getCounters(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto> done);

      /**
       * <code>rpc getTaskAttemptCompletionEvents(.hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto);</code>
       */
      public abstract void getTaskAttemptCompletionEvents(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto> done);

      /**
       * <code>rpc getTaskReports(.hadoop.mapreduce.GetTaskReportsRequestProto) returns (.hadoop.mapreduce.GetTaskReportsResponseProto);</code>
       */
      public abstract void getTaskReports(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto> done);

      /**
       * <code>rpc getDiagnostics(.hadoop.mapreduce.GetDiagnosticsRequestProto) returns (.hadoop.mapreduce.GetDiagnosticsResponseProto);</code>
       */
      public abstract void getDiagnostics(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto> done);

      /**
       * <code>rpc getDelegationToken(.hadoop.common.GetDelegationTokenRequestProto) returns (.hadoop.common.GetDelegationTokenResponseProto);</code>
       */
      public abstract void getDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto> done);

      /**
       * <code>rpc killJob(.hadoop.mapreduce.KillJobRequestProto) returns (.hadoop.mapreduce.KillJobResponseProto);</code>
       */
      public abstract void killJob(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto> done);

      /**
       * <code>rpc killTask(.hadoop.mapreduce.KillTaskRequestProto) returns (.hadoop.mapreduce.KillTaskResponseProto);</code>
       */
      public abstract void killTask(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto> done);

      /**
       * <code>rpc killTaskAttempt(.hadoop.mapreduce.KillTaskAttemptRequestProto) returns (.hadoop.mapreduce.KillTaskAttemptResponseProto);</code>
       */
      public abstract void killTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto> done);

      /**
       * <code>rpc failTaskAttempt(.hadoop.mapreduce.FailTaskAttemptRequestProto) returns (.hadoop.mapreduce.FailTaskAttemptResponseProto);</code>
       */
      public abstract void failTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto> done);

      /**
       * <code>rpc renewDelegationToken(.hadoop.common.RenewDelegationTokenRequestProto) returns (.hadoop.common.RenewDelegationTokenResponseProto);</code>
       */
      public abstract void renewDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto> done);

      /**
       * <code>rpc cancelDelegationToken(.hadoop.common.CancelDelegationTokenRequestProto) returns (.hadoop.common.CancelDelegationTokenResponseProto);</code>
       */
      public abstract void cancelDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new MRClientProtocolService() {
        @java.lang.Override
        public  void getJobReport(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto> done) {
          impl.getJobReport(controller, request, done);
        }

        @java.lang.Override
        public  void getTaskReport(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto> done) {
          impl.getTaskReport(controller, request, done);
        }

        @java.lang.Override
        public  void getTaskAttemptReport(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto> done) {
          impl.getTaskAttemptReport(controller, request, done);
        }

        @java.lang.Override
        public  void getCounters(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto> done) {
          impl.getCounters(controller, request, done);
        }

        @java.lang.Override
        public  void getTaskAttemptCompletionEvents(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto> done) {
          impl.getTaskAttemptCompletionEvents(controller, request, done);
        }

        @java.lang.Override
        public  void getTaskReports(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto> done) {
          impl.getTaskReports(controller, request, done);
        }

        @java.lang.Override
        public  void getDiagnostics(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto> done) {
          impl.getDiagnostics(controller, request, done);
        }

        @java.lang.Override
        public  void getDelegationToken(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto> done) {
          impl.getDelegationToken(controller, request, done);
        }

        @java.lang.Override
        public  void killJob(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto> done) {
          impl.killJob(controller, request, done);
        }

        @java.lang.Override
        public  void killTask(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto> done) {
          impl.killTask(controller, request, done);
        }

        @java.lang.Override
        public  void killTaskAttempt(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto> done) {
          impl.killTaskAttempt(controller, request, done);
        }

        @java.lang.Override
        public  void failTaskAttempt(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto> done) {
          impl.failTaskAttempt(controller, request, done);
        }

        @java.lang.Override
        public  void renewDelegationToken(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto> done) {
          impl.renewDelegationToken(controller, request, done);
        }

        @java.lang.Override
        public  void cancelDelegationToken(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto> done) {
          impl.cancelDelegationToken(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.getJobReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)request);
            case 1:
              return impl.getTaskReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)request);
            case 2:
              return impl.getTaskAttemptReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)request);
            case 3:
              return impl.getCounters(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)request);
            case 4:
              return impl.getTaskAttemptCompletionEvents(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)request);
            case 5:
              return impl.getTaskReports(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)request);
            case 6:
              return impl.getDiagnostics(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)request);
            case 7:
              return impl.getDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto)request);
            case 8:
              return impl.killJob(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)request);
            case 9:
              return impl.killTask(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)request);
            case 10:
              return impl.killTaskAttempt(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)request);
            case 11:
              return impl.failTaskAttempt(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)request);
            case 12:
              return impl.renewDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto)request);
            case 13:
              return impl.cancelDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance();
            case 6:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto.getDefaultInstance();
            case 8:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance();
            case 6:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.getDefaultInstance();
            case 8:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc getJobReport(.hadoop.mapreduce.GetJobReportRequestProto) returns (.hadoop.mapreduce.GetJobReportResponseProto);</code>
     */
    public abstract void getJobReport(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto> done);

    /**
     * <code>rpc getTaskReport(.hadoop.mapreduce.GetTaskReportRequestProto) returns (.hadoop.mapreduce.GetTaskReportResponseProto);</code>
     */
    public abstract void getTaskReport(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto> done);

    /**
     * <code>rpc getTaskAttemptReport(.hadoop.mapreduce.GetTaskAttemptReportRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptReportResponseProto);</code>
     */
    public abstract void getTaskAttemptReport(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto> done);

    /**
     * <code>rpc getCounters(.hadoop.mapreduce.GetCountersRequestProto) returns (.hadoop.mapreduce.GetCountersResponseProto);</code>
     */
    public abstract void getCounters(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto> done);

    /**
     * <code>rpc getTaskAttemptCompletionEvents(.hadoop.mapreduce.GetTaskAttemptCompletionEventsRequestProto) returns (.hadoop.mapreduce.GetTaskAttemptCompletionEventsResponseProto);</code>
     */
    public abstract void getTaskAttemptCompletionEvents(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto> done);

    /**
     * <code>rpc getTaskReports(.hadoop.mapreduce.GetTaskReportsRequestProto) returns (.hadoop.mapreduce.GetTaskReportsResponseProto);</code>
     */
    public abstract void getTaskReports(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto> done);

    /**
     * <code>rpc getDiagnostics(.hadoop.mapreduce.GetDiagnosticsRequestProto) returns (.hadoop.mapreduce.GetDiagnosticsResponseProto);</code>
     */
    public abstract void getDiagnostics(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto> done);

    /**
     * <code>rpc getDelegationToken(.hadoop.common.GetDelegationTokenRequestProto) returns (.hadoop.common.GetDelegationTokenResponseProto);</code>
     */
    public abstract void getDelegationToken(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto> done);

    /**
     * <code>rpc killJob(.hadoop.mapreduce.KillJobRequestProto) returns (.hadoop.mapreduce.KillJobResponseProto);</code>
     */
    public abstract void killJob(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto> done);

    /**
     * <code>rpc killTask(.hadoop.mapreduce.KillTaskRequestProto) returns (.hadoop.mapreduce.KillTaskResponseProto);</code>
     */
    public abstract void killTask(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto> done);

    /**
     * <code>rpc killTaskAttempt(.hadoop.mapreduce.KillTaskAttemptRequestProto) returns (.hadoop.mapreduce.KillTaskAttemptResponseProto);</code>
     */
    public abstract void killTaskAttempt(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto> done);

    /**
     * <code>rpc failTaskAttempt(.hadoop.mapreduce.FailTaskAttemptRequestProto) returns (.hadoop.mapreduce.FailTaskAttemptResponseProto);</code>
     */
    public abstract void failTaskAttempt(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto> done);

    /**
     * <code>rpc renewDelegationToken(.hadoop.common.RenewDelegationTokenRequestProto) returns (.hadoop.common.RenewDelegationTokenResponseProto);</code>
     */
    public abstract void renewDelegationToken(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto> done);

    /**
     * <code>rpc cancelDelegationToken(.hadoop.common.CancelDelegationTokenRequestProto) returns (.hadoop.common.CancelDelegationTokenResponseProto);</code>
     */
    public abstract void cancelDelegationToken(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.yarn.proto.MRClientProtocol.getDescriptor().getServices().get(0);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    /**
     * Reflective dispatch entry point used by the RPC framework: invokes the
     * service method identified by {@code method.getIndex()} on this
     * instance, downcasting the generic request message to the concrete
     * request proto and specializing the generic callback to the concrete
     * response proto.
     *
     * <p>The case indices (0..13) mirror the declaration order of the rpc
     * methods in MRClientProtocol.proto; {@code method} must belong to this
     * service's descriptor or an {@link IllegalArgumentException} is thrown.
     * The {@code default} arm is unreachable for a valid descriptor.
     */
    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.getJobReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.getTaskReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.getTaskAttemptReport(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.getCounters(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto>specializeCallback(
              done));
          return;
        case 4:
          this.getTaskAttemptCompletionEvents(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto>specializeCallback(
              done));
          return;
        case 5:
          this.getTaskReports(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto>specializeCallback(
              done));
          return;
        case 6:
          this.getDiagnostics(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto>specializeCallback(
              done));
          return;
        case 7:
          this.getDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto>specializeCallback(
              done));
          return;
        case 8:
          this.killJob(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto>specializeCallback(
              done));
          return;
        case 9:
          this.killTask(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto>specializeCallback(
              done));
          return;
        case 10:
          this.killTaskAttempt(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto>specializeCallback(
              done));
          return;
        case 11:
          this.failTaskAttempt(controller, (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto>specializeCallback(
              done));
          return;
        case 12:
          this.renewDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto>specializeCallback(
              done));
          return;
        case 13:
          this.cancelDelegationToken(controller, (org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    /**
     * Returns the default (empty) request message instance for the given
     * method; the RPC framework uses this prototype to parse incoming
     * request bytes into the correct concrete type.  Case indices mirror
     * the rpc declaration order in MRClientProtocol.proto.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto.getDefaultInstance();
        case 6:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto.getDefaultInstance();
        case 8:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    /**
     * Returns the default (empty) response message instance for the given
     * method; the RPC framework uses this prototype to parse response bytes
     * into the correct concrete type.  Case indices mirror the rpc
     * declaration order in MRClientProtocol.proto.
     */
    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance();
        case 6:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.getDefaultInstance();
        case 8:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    /**
     * Creates a non-blocking client stub that issues each call over the
     * supplied {@code RpcChannel}.
     */
    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    /**
     * Client-side asynchronous stub.  Each method forwards the call over the
     * wrapped {@code RpcChannel} using the method descriptor at the matching
     * index (same order as the rpc declarations in MRClientProtocol.proto),
     * passing the response's default instance as the parse prototype and
     * generalizing the typed callback to a {@code Message} callback.
     * Construct via {@link #newStub}.
     */
    public static final class Stub extends org.apache.hadoop.yarn.proto.MRClientProtocol.MRClientProtocolService implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      // Transport the stub delegates every call to.
      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void getJobReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance()));
      }

      public  void getTaskReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance()));
      }

      public  void getTaskAttemptReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance()));
      }

      public  void getCounters(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance()));
      }

      public  void getTaskAttemptCompletionEvents(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance()));
      }

      public  void getTaskReports(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance()));
      }

      public  void getDiagnostics(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance()));
      }

      public  void getDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.class,
            org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.getDefaultInstance()));
      }

      public  void killJob(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance()));
      }

      public  void killTask(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance()));
      }

      public  void killTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance()));
      }

      public  void failTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.class,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance()));
      }

      public  void renewDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.class,
            org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.getDefaultInstance()));
      }

      public  void cancelDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.class,
            org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.getDefaultInstance()));
      }
    }

    /**
     * Creates a synchronous (blocking) client stub over the supplied
     * {@code BlockingRpcChannel}.
     */
    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    /**
     * Synchronous variant of the service: each method blocks until the call
     * completes and returns the response directly, throwing
     * {@code ServiceException} on failure.  One method per rpc declared in
     * MRClientProtocol.proto.  Obtain an implementation via
     * {@link #newBlockingStub}.
     */
    public interface BlockingInterface {
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getJobReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getTaskReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getTaskAttemptReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getCounters(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getTaskAttemptCompletionEvents(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getTaskReports(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDiagnostics(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto getDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto killJob(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto killTask(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto killTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto failTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto renewDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto cancelDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto getJobReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto getTaskReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto getTaskAttemptReport(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto getCounters(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto.getDefaultInstance());
      }


      /**
       * Blocking client-side call for the {@code getTaskAttemptCompletionEvents} RPC.
       * Looks up method index 4 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the GetTaskAttemptCompletionEventsRequestProto to send
       * @return the server's GetTaskAttemptCompletionEventsResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto getTaskAttemptCompletionEvents(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(4);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code getTaskReports} RPC.
       * Looks up method index 5 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the GetTaskReportsRequestProto to send
       * @return the server's GetTaskReportsResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto getTaskReports(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(5);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code getDiagnostics} RPC.
       * Looks up method index 6 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the GetDiagnosticsRequestProto to send
       * @return the server's GetDiagnosticsResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto getDiagnostics(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(6);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code getDelegationToken} RPC.
       * Looks up method index 7 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       * Request/response types come from the shared Security.proto definitions.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the GetDelegationTokenRequestProto to send
       * @return the server's GetDelegationTokenResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto getDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(7);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto.getDefaultInstance());
        return (org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code killJob} RPC.
       * Looks up method index 8 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the KillJobRequestProto to send
       * @return the server's KillJobResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto killJob(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(8);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code killTask} RPC.
       * Looks up method index 9 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the KillTaskRequestProto to send
       * @return the server's KillTaskResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto killTask(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(9);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code killTaskAttempt} RPC.
       * Looks up method index 10 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the KillTaskAttemptRequestProto to send
       * @return the server's KillTaskAttemptResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto killTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(10);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code failTaskAttempt} RPC.
       * Looks up method index 11 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the FailTaskAttemptRequestProto to send
       * @return the server's FailTaskAttemptResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto failTaskAttempt(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(11);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto.getDefaultInstance());
        return (org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code renewDelegationToken} RPC.
       * Looks up method index 12 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       * Request/response types come from the shared Security.proto definitions.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the RenewDelegationTokenRequestProto to send
       * @return the server's RenewDelegationTokenResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto renewDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(12);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto.getDefaultInstance());
        return (org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenResponseProto) reply;
      }


      /**
       * Blocking client-side call for the {@code cancelDelegationToken} RPC.
       * Looks up method index 13 in the service descriptor, invokes it on the
       * wrapped channel, and casts the generic reply to the concrete proto.
       * Request/response types come from the shared Security.proto definitions.
       *
       * @param controller per-call RPC controller, passed through to the channel
       * @param request the CancelDelegationTokenRequestProto to send
       * @return the server's CancelDelegationTokenResponseProto
       * @throws org.apache.hadoop.thirdparty.protobuf.ServiceException if the channel reports an RPC failure
       */
      public org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto cancelDelegationToken(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        // Index must match the method's position in MRClientProtocol.proto.
        final org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method =
            getDescriptor().getMethods().get(13);
        final org.apache.hadoop.thirdparty.protobuf.Message reply = channel.callBlockingMethod(
            method, controller, request,
            org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto.getDefaultInstance());
        return (org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenResponseProto) reply;
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.mapreduce.MRClientProtocolService)
  }


  /**
   * Returns the file descriptor for {@code MRClientProtocol.proto}.
   * The descriptor is built exactly once in this class's static initializer.
   */
  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Parsed file descriptor for MRClientProtocol.proto; assigned once in the
  // static initializer below and exposed via getDescriptor().
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto emitted by protoc, encoded as Java string
    // literals (binary data in octal escapes, split across concatenations).
    // Do not edit by hand — regenerate from MRClientProtocol.proto instead.
    java.lang.String[] descriptorData = {
      "\n\026MRClientProtocol.proto\022\020hadoop.mapredu" +
      "ce\032\016Security.proto\032\027mr_service_protos.pr" +
      "oto2\334\014\n\027MRClientProtocolService\022g\n\014getJo" +
      "bReport\022*.hadoop.mapreduce.GetJobReportR" +
      "equestProto\032+.hadoop.mapreduce.GetJobRep" +
      "ortResponseProto\022j\n\rgetTaskReport\022+.hado" +
      "op.mapreduce.GetTaskReportRequestProto\032," +
      ".hadoop.mapreduce.GetTaskReportResponseP" +
      "roto\022\177\n\024getTaskAttemptReport\0222.hadoop.ma" +
      "preduce.GetTaskAttemptReportRequestProto" +
      "\0323.hadoop.mapreduce.GetTaskAttemptReport" +
      "ResponseProto\022d\n\013getCounters\022).hadoop.ma" +
      "preduce.GetCountersRequestProto\032*.hadoop" +
      ".mapreduce.GetCountersResponseProto\022\235\001\n\036" +
      "getTaskAttemptCompletionEvents\022<.hadoop." +
      "mapreduce.GetTaskAttemptCompletionEvents" +
      "RequestProto\032=.hadoop.mapreduce.GetTaskA" +
      "ttemptCompletionEventsResponseProto\022m\n\016g" +
      "etTaskReports\022,.hadoop.mapreduce.GetTask" +
      "ReportsRequestProto\032-.hadoop.mapreduce.G" +
      "etTaskReportsResponseProto\022m\n\016getDiagnos" +
      "tics\022,.hadoop.mapreduce.GetDiagnosticsRe" +
      "questProto\032-.hadoop.mapreduce.GetDiagnos" +
      "ticsResponseProto\022s\n\022getDelegationToken\022" +
      "-.hadoop.common.GetDelegationTokenReques" +
      "tProto\032..hadoop.common.GetDelegationToke" +
      "nResponseProto\022X\n\007killJob\022%.hadoop.mapre" +
      "duce.KillJobRequestProto\032&.hadoop.mapred" +
      "uce.KillJobResponseProto\022[\n\010killTask\022&.h" +
      "adoop.mapreduce.KillTaskRequestProto\032\'.h" +
      "adoop.mapreduce.KillTaskResponseProto\022p\n" +
      "\017killTaskAttempt\022-.hadoop.mapreduce.Kill" +
      "TaskAttemptRequestProto\032..hadoop.mapredu" +
      "ce.KillTaskAttemptResponseProto\022p\n\017failT" +
      "askAttempt\022-.hadoop.mapreduce.FailTaskAt" +
      "temptRequestProto\032..hadoop.mapreduce.Fai" +
      "lTaskAttemptResponseProto\022y\n\024renewDelega" +
      "tionToken\022/.hadoop.common.RenewDelegatio" +
      "nTokenRequestProto\0320.hadoop.common.Renew" +
      "DelegationTokenResponseProto\022|\n\025cancelDe" +
      "legationToken\0220.hadoop.common.CancelDele" +
      "gationTokenRequestProto\0321.hadoop.common." +
      "CancelDelegationTokenResponseProtoB3\n\034or" +
      "g.apache.hadoop.yarn.protoB\020MRClientProt" +
      "ocol\210\001\001"
    };
    // Build the descriptor, resolving references against the two files this
    // proto imports: Security.proto and mr_service_protos.proto.
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.security.proto.SecurityProtos.getDescriptor(),
          org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.getDescriptor(),
        });
    // Touch the dependency outer classes to force their static initialization.
    org.apache.hadoop.security.proto.SecurityProtos.getDescriptor();
    org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}