TestRpcServiceProtos.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: test_rpc_service.proto

// Protobuf Java Version: 3.25.5
package org.apache.hadoop.ipc.protobuf;

public final class TestRpcServiceProtos {
  private TestRpcServiceProtos() {}
  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite registry) {
  }

  public static void registerAllExtensions(
      org.apache.hadoop.thirdparty.protobuf.ExtensionRegistry registry) {
    registerAllExtensions(
        (org.apache.hadoop.thirdparty.protobuf.ExtensionRegistryLite) registry);
  }
  /**
   * <pre>
   * A protobuf service for use in tests
   * </pre>
   *
   * Protobuf service {@code hadoop.common.TestProtobufRpcProto}
   */
  public static abstract class TestProtobufRpcProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected TestProtobufRpcProto() {}

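    /**
     * Asynchronous callback view of the RPC methods declared in
     * {@code test_rpc_service.proto}. Implement this interface and pass it to
     * {@code newReflectiveService(Interface)} to expose it as a protobuf
     * {@code Service}.
     */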
    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

      /**
       * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void error(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void error2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void slowPing(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
       */
      public abstract void echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2> done);

      /**
       * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
       */
      public abstract void add(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done);

      /**
       * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
       */
      public abstract void add2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done);

      /**
       * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void testServerGet(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
       */
      public abstract void exchange(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto> done);

      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void lockAndSleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
       */
      public abstract void getAuthMethod(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto> done);

      /**
       * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getAuthUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

      /**
       * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echoPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

      /**
       * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void sendPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getCurrentUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

      /**
       * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getServerRemoteUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

    }

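    /**
     * Adapts a non-blocking {@code Interface} implementation into a
     * {@code Service}; each generated override simply forwards the call,
     * unchanged, to {@code impl}.
     */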
    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
          impl.echo(controller, request, done);
        }

        @java.lang.Override
        public  void error(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.error(controller, request, done);
        }

        @java.lang.Override
        public  void error2(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.error2(controller, request, done);
        }

        @java.lang.Override
        public  void slowPing(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.slowPing(controller, request, done);
        }

        @java.lang.Override
        public  void echo2(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2> done) {
          impl.echo2(controller, request, done);
        }

        @java.lang.Override
        public  void add(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done) {
          impl.add(controller, request, done);
        }

        @java.lang.Override
        public  void add2(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done) {
          impl.add2(controller, request, done);
        }

        @java.lang.Override
        public  void testServerGet(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.testServerGet(controller, request, done);
        }

        @java.lang.Override
        public  void exchange(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto> done) {
          impl.exchange(controller, request, done);
        }

        @java.lang.Override
        public  void sleep(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.sleep(controller, request, done);
        }

        @java.lang.Override
        public  void lockAndSleep(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.lockAndSleep(controller, request, done);
        }

        @java.lang.Override
        public  void getAuthMethod(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto> done) {
          impl.getAuthMethod(controller, request, done);
        }

        @java.lang.Override
        public  void getAuthUser(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
          impl.getAuthUser(controller, request, done);
        }

        @java.lang.Override
        public  void echoPostponed(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
          impl.echoPostponed(controller, request, done);
        }

        @java.lang.Override
        public  void sendPostponed(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.sendPostponed(controller, request, done);
        }

        @java.lang.Override
        public  void getCurrentUser(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
          impl.getCurrentUser(controller, request, done);
        }

        @java.lang.Override
        public  void getServerRemoteUser(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
          impl.getServerRemoteUser(controller, request, done);
        }

      };
    }

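    /**
     * Adapts a {@code BlockingInterface} implementation into a
     * {@code BlockingService} that dispatches by method index and reports the
     * request and response prototypes for each declared RPC.
     */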
    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
            case 2:
              return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 3:
              return impl.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 4:
              return impl.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)request);
            case 5:
              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)request);
            case 6:
              return impl.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)request);
            case 7:
              return impl.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)request);
            case 8:
              return impl.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 9:
              return impl.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)request);
            case 10:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request);
            case 11:
              return impl.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request);
            case 12:
              return impl.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 13:
              return impl.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 14:
              return impl.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
            case 15:
              return impl.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 16:
              return impl.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 17:
              return impl.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
            case 6:
              return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
            case 8:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 14:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
            case 15:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 16:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 17:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
            case 6:
              return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
            case 8:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
            case 14:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
            case 15:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 16:
              return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
            case 17:
              return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echo(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

    /**
     * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void error(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void error2(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void slowPing(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
     */
    public abstract void echo2(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2> done);

    /**
     * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
     */
    public abstract void add(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done);

    /**
     * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
     */
    public abstract void add2(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done);

    /**
     * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void testServerGet(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
     */
    public abstract void exchange(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto> done);

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void sleep(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void lockAndSleep(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
     */
    public abstract void getAuthMethod(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto> done);

    /**
     * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getAuthUser(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

    /**
     * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echoPostponed(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

    /**
     * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void sendPostponed(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getCurrentUser(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

    /**
     * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getServerRemoteUser(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(0);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

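    /**
     * Framework dispatch entry point: routes the call to the abstract method
     * whose descriptor index matches {@code method}, narrowing the generic
     * callback to that method's response type.
     */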
    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 4:
          this.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 5:
          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2>specializeCallback(
              done));
          return;
        case 6:
          this.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto>specializeCallback(
              done));
          return;
        case 7:
          this.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto>specializeCallback(
              done));
          return;
        case 8:
          this.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 9:
          this.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto>specializeCallback(
              done));
          return;
        case 10:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 11:
          this.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 12:
          this.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto>specializeCallback(
              done));
          return;
        case 13:
          this.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto>specializeCallback(
              done));
          return;
        case 14:
          this.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto>specializeCallback(
              done));
          return;
        case 15:
          this.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 16:
          this.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto>specializeCallback(
              done));
          return;
        case 17:
          this.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2.getDefaultInstance();
        case 6:
          return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2.getDefaultInstance();
        case 8:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 14:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
        case 15:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 16:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 17:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance();
        case 6:
          return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance();
        case 8:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
        case 14:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
        case 15:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 16:
          return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
        case 17:
          return org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

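    /**
     * Creates a client-side stub that issues every RPC through the supplied
     * {@code RpcChannel}. A minimal non-blocking usage sketch, assuming a
     * connected {@code channel} and an {@code RpcController} obtained from the
     * test's RPC setup (both names are placeholders, not part of this file):
     * <pre>{@code
     * // channel and controller are assumed to come from the surrounding test harness
     * TestProtobufRpcProto.Stub stub = TestProtobufRpcProto.newStub(channel);
     * stub.ping(controller,
     *     TestProtos.EmptyRequestProto.getDefaultInstance(),
     *     response -> System.out.println("ping acknowledged"));  // RpcCallback as a lambda
     * }</pre>
     */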
    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

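    /**
     * Client-side implementation that delegates each method to
     * {@code channel.callMethod} with the matching method descriptor and
     * response prototype.
     */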
    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
      }

      public  void error(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void error2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void slowPing(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance()));
      }

      public  void add(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()));
      }

      public  void add2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance()));
      }

      public  void testServerGet(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void exchange(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance()));
      }

      public  void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void lockAndSleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void getAuthMethod(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance()));
      }

      public  void getAuthUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance()));
      }

      public  void echoPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
      }

      public  void sendPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void getCurrentUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance()));
      }

      public  void getServerRemoteUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto slowPing(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto testServerGet(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto exchange(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto lockAndSleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getAuthMethod(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getAuthUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echoPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sendPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getCurrentUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getServerRemoteUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto slowPing(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SlowPingRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2 echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2) channel.callBlockingMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto2.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto add2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AddResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto testServerGet(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto exchange(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.ExchangeResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto lockAndSleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto getAuthMethod(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.AuthMethodResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getAuthUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echoPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto sendPostponed(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getCurrentUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto getServerRemoteUser(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.UserResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcProto)
  }
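
  /*
   * Usage sketch (editorial note, not emitted by protoc): a minimal example of how a
   * test might drive TestProtobufRpcProto through its blocking stub. The
   * BlockingRpcChannel named "channel" is assumed to be supplied by the surrounding
   * test harness (e.g. Hadoop's RPC engine); the null RpcController is only for brevity.
   *
   *   TestProtobufRpcProto.BlockingInterface stub =
   *       TestProtobufRpcProto.newBlockingStub(channel);
   *   TestProtos.EmptyResponseProto reply =
   *       stub.ping(null, TestProtos.EmptyRequestProto.getDefaultInstance());
   */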

  /**
   * Protobuf service {@code hadoop.common.TestProtobufRpc2Proto}
   */
  public static abstract class TestProtobufRpc2Proto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected TestProtobufRpc2Proto() {}

    public interface Interface {
      /**
       * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
       */
      public abstract void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpc2Proto() {
        @java.lang.Override
        public  void ping2(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping2(controller, request, done);
        }

        @java.lang.Override
        public  void echo2(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
          impl.echo2(controller, request, done);
        }

        @java.lang.Override
        public  void sleep(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto> done) {
          impl.sleep(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
            case 2:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping2(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echo2(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
     */
    public abstract void sleep(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(1);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpc2Proto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
      }

      public  void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance()));
      }
    }
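
    /*
     * Usage sketch (editorial note, not emitted by protoc): the non-blocking Stub
     * above is driven through an RpcCallback that receives the typed response.
     * The RpcChannel named "channel" is assumed to come from the test harness;
     * the null RpcController is only for brevity.
     *
     *   TestProtobufRpc2Proto.Stub asyncStub = TestProtobufRpc2Proto.newStub(channel);
     *   asyncStub.ping2(null, TestProtos.EmptyRequestProto.getDefaultInstance(),
     *       new org.apache.hadoop.thirdparty.protobuf.RpcCallback<TestProtos.EmptyResponseProto>() {
     *         public void run(TestProtos.EmptyResponseProto response) {
     *           // inspect the response here
     *         }
     *       });
     */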

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpc2Proto)
  }
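
  /*
   * Server-side sketch (editorial note, not emitted by protoc): a test server exposes
   * TestProtobufRpc2Proto by wrapping a BlockingInterface implementation with
   * newReflectiveBlockingService and registering the resulting BlockingService with
   * its RPC server. The implementation object below is hypothetical.
   *
   *   TestProtobufRpc2Proto.BlockingInterface impl = ...; // test-only implementation
   *   org.apache.hadoop.thirdparty.protobuf.BlockingService service =
   *       TestProtobufRpc2Proto.newReflectiveBlockingService(impl);
   */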

  /**
   * Protobuf service {@code hadoop.common.OldProtobufRpcProto}
   */
  public static abstract class OldProtobufRpcProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected OldProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new OldProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void echo(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(2);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.OldProtobufRpcProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.OldProtobufRpcProto)
  }
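
  /*
   * Usage sketch (editorial note, not emitted by protoc): OldProtobufRpcProto's echo
   * takes an EmptyRequestProto, while the NewProtobufRpcProto variant below takes an
   * OptRequestProto; the pair presumably exists to exercise protocol compatibility in
   * tests. The channel named "channel" is assumed to be provided by the test harness.
   *
   *   OldProtobufRpcProto.BlockingInterface oldStub =
   *       OldProtobufRpcProto.newBlockingStub(channel);
   *   TestProtos.EmptyResponseProto echoed =
   *       oldStub.echo(null, TestProtos.EmptyRequestProto.getDefaultInstance());
   */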

  /**
   * Protobuf service {@code hadoop.common.NewProtobufRpcProto}
   */
  public static abstract class NewProtobufRpcProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected NewProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
       */
      public abstract void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new NewProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
     */
    public abstract void echo(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(3);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

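    /**
     * Creates a non-blocking stub over an {@code RpcChannel}. In the sketch
     * below, the channel and controller are assumed to be supplied by the
     * caller's RPC client and the wrapper method {@code pingAsync} is
     * hypothetical; only the stub API itself comes from this file.
     * <pre>{@code
     * void pingAsync(RpcChannel channel, RpcController controller) {
     *   NewProtobufRpcProto.Stub stub = NewProtobufRpcProto.newStub(channel);
     *   stub.ping(controller, TestProtos.EmptyRequestProto.getDefaultInstance(),
     *       response -> {
     *         // invoked once the asynchronous call completes
     *       });
     * }
     * }</pre>
     */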
    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewProtobufRpcProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.OptResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.NewProtobufRpcProto)
  }

  /**
   * Protobuf service {@code hadoop.common.NewerProtobufRpcProto}
   */
  public static abstract class NewerProtobufRpcProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected NewerProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new NewerProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void echo(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(4);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.NewerProtobufRpcProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }
    }

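    /**
     * Creates a synchronous stub over a {@code BlockingRpcChannel}. Sketch
     * only; the channel, controller, and the wrapper method {@code callEcho}
     * are assumptions supplied by the caller, not part of this generated file.
     * <pre>{@code
     * TestProtos.EmptyResponseProto callEcho(BlockingRpcChannel channel,
     *     RpcController controller) throws ServiceException {
     *   NewerProtobufRpcProto.BlockingInterface stub =
     *       NewerProtobufRpcProto.newBlockingStub(channel);
     *   return stub.echo(controller,
     *       TestProtos.EmptyRequestProto.getDefaultInstance());
     * }
     * }</pre>
     */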
    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto echo(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.NewerProtobufRpcProto)
  }

  /**
   * Protobuf service {@code hadoop.common.CustomProto}
   */
  public static abstract class CustomProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected CustomProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new CustomProto() {
        @java.lang.Override
        public  void ping(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(5);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.CustomProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CustomProto)
  }

  /**
   * Protobuf service {@code hadoop.common.TestProtobufRpcHandoffProto}
   */
  public static abstract class TestProtobufRpcHandoffProto
      implements org.apache.hadoop.thirdparty.protobuf.Service {
    protected TestProtobufRpcHandoffProto() {}

    public interface Interface {
      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
       */
      public abstract void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2> done);

    }

    public static org.apache.hadoop.thirdparty.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcHandoffProto() {
        @java.lang.Override
        public  void sleep(
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request,
            org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2> done) {
          impl.sleep(controller, request, done);
        }

      };
    }

    public static org.apache.hadoop.thirdparty.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new org.apache.hadoop.thirdparty.protobuf.BlockingService() {
        public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message callBlockingMethod(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
            org.apache.hadoop.thirdparty.protobuf.RpcController controller,
            org.apache.hadoop.thirdparty.protobuf.Message request)
            throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getRequestPrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final org.apache.hadoop.thirdparty.protobuf.Message
            getResponsePrototype(
            org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
     */
    public abstract void sleep(
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2> done);

    public static final
        org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(6);
    }
    public final org.apache.hadoop.thirdparty.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method,
        org.apache.hadoop.thirdparty.protobuf.RpcController controller,
        org.apache.hadoop.thirdparty.protobuf.Message request,
        org.apache.hadoop.thirdparty.protobuf.RpcCallback<
          org.apache.hadoop.thirdparty.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2)request,
            org.apache.hadoop.thirdparty.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getRequestPrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final org.apache.hadoop.thirdparty.protobuf.Message
        getResponsePrototype(
        org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcHandoffProto implements Interface {
      private Stub(org.apache.hadoop.thirdparty.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.RpcChannel channel;

      public org.apache.hadoop.thirdparty.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request,
          org.apache.hadoop.thirdparty.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance(),
          org.apache.hadoop.thirdparty.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.class,
            org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final org.apache.hadoop.thirdparty.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2 sleep(
          org.apache.hadoop.thirdparty.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepRequestProto2 request)
          throws org.apache.hadoop.thirdparty.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtos.SleepResponseProto2.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcHandoffProto)
  }


  public static org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static  org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      descriptor;
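  // The static initializer below rebuilds this file's FileDescriptor from the
  // serialized descriptor of test_rpc_service.proto (descriptorData), linking
  // it against its single dependency, test.proto (TestProtos).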
  static {
    java.lang.String[] descriptorData = {
      "\n\026test_rpc_service.proto\022\rhadoop.common\032" +
      "\ntest.proto2\330\013\n\024TestProtobufRpcProto\022K\n\004" +
      "ping\022 .hadoop.common.EmptyRequestProto\032!" +
      ".hadoop.common.EmptyResponseProto\022I\n\004ech" +
      "o\022\037.hadoop.common.EchoRequestProto\032 .had" +
      "oop.common.EchoResponseProto\022L\n\005error\022 ." +
      "hadoop.common.EmptyRequestProto\032!.hadoop" +
      ".common.EmptyResponseProto\022M\n\006error2\022 .h" +
      "adoop.common.EmptyRequestProto\032!.hadoop." +
      "common.EmptyResponseProto\022R\n\010slowPing\022#." +
      "hadoop.common.SlowPingRequestProto\032!.had" +
      "oop.common.EmptyResponseProto\022L\n\005echo2\022 " +
      ".hadoop.common.EchoRequestProto2\032!.hadoo" +
      "p.common.EchoResponseProto2\022F\n\003add\022\036.had" +
      "oop.common.AddRequestProto\032\037.hadoop.comm" +
      "on.AddResponseProto\022H\n\004add2\022\037.hadoop.com" +
      "mon.AddRequestProto2\032\037.hadoop.common.Add" +
      "ResponseProto\022T\n\rtestServerGet\022 .hadoop." +
      "common.EmptyRequestProto\032!.hadoop.common" +
      ".EmptyResponseProto\022U\n\010exchange\022#.hadoop" +
      ".common.ExchangeRequestProto\032$.hadoop.co" +
      "mmon.ExchangeResponseProto\022L\n\005sleep\022 .ha" +
      "doop.common.SleepRequestProto\032!.hadoop.c" +
      "ommon.EmptyResponseProto\022S\n\014lockAndSleep" +
      "\022 .hadoop.common.SleepRequestProto\032!.had" +
      "oop.common.EmptyResponseProto\022Y\n\rgetAuth" +
      "Method\022 .hadoop.common.EmptyRequestProto" +
      "\032&.hadoop.common.AuthMethodResponseProto" +
      "\022Q\n\013getAuthUser\022 .hadoop.common.EmptyReq" +
      "uestProto\032 .hadoop.common.UserResponsePr" +
      "oto\022R\n\rechoPostponed\022\037.hadoop.common.Ech" +
      "oRequestProto\032 .hadoop.common.EchoRespon" +
      "seProto\022T\n\rsendPostponed\022 .hadoop.common" +
      ".EmptyRequestProto\032!.hadoop.common.Empty" +
      "ResponseProto\022T\n\016getCurrentUser\022 .hadoop" +
      ".common.EmptyRequestProto\032 .hadoop.commo" +
      "n.UserResponseProto\022Y\n\023getServerRemoteUs" +
      "er\022 .hadoop.common.EmptyRequestProto\032 .h" +
      "adoop.common.UserResponseProto2\377\001\n\025TestP" +
      "rotobufRpc2Proto\022L\n\005ping2\022 .hadoop.commo" +
      "n.EmptyRequestProto\032!.hadoop.common.Empt" +
      "yResponseProto\022J\n\005echo2\022\037.hadoop.common." +
      "EchoRequestProto\032 .hadoop.common.EchoRes" +
      "ponseProto\022L\n\005sleep\022 .hadoop.common.Slee" +
      "pRequestProto\032!.hadoop.common.SleepRespo" +
      "nseProto2\257\001\n\023OldProtobufRpcProto\022K\n\004ping" +
      "\022 .hadoop.common.EmptyRequestProto\032!.had" +
      "oop.common.EmptyResponseProto\022K\n\004echo\022 ." +
      "hadoop.common.EmptyRequestProto\032!.hadoop" +
      ".common.EmptyResponseProto2\253\001\n\023NewProtob" +
      "ufRpcProto\022K\n\004ping\022 .hadoop.common.Empty" +
      "RequestProto\032!.hadoop.common.EmptyRespon" +
      "seProto\022G\n\004echo\022\036.hadoop.common.OptReque" +
      "stProto\032\037.hadoop.common.OptResponseProto" +
      "2\261\001\n\025NewerProtobufRpcProto\022K\n\004ping\022 .had" +
      "oop.common.EmptyRequestProto\032!.hadoop.co" +
      "mmon.EmptyResponseProto\022K\n\004echo\022 .hadoop" +
      ".common.EmptyRequestProto\032!.hadoop.commo" +
      "n.EmptyResponseProto2Z\n\013CustomProto\022K\n\004p" +
      "ing\022 .hadoop.common.EmptyRequestProto\032!." +
      "hadoop.common.EmptyResponseProto2m\n\033Test" +
      "ProtobufRpcHandoffProto\022N\n\005sleep\022!.hadoo" +
      "p.common.SleepRequestProto2\032\".hadoop.com" +
      "mon.SleepResponseProto2B<\n\036org.apache.ha" +
      "doop.ipc.protobufB\024TestRpcServiceProtos\210" +
      "\001\001\240\001\001"
    };
    descriptor = org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new org.apache.hadoop.thirdparty.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.ipc.protobuf.TestProtos.getDescriptor(),
        });
    org.apache.hadoop.ipc.protobuf.TestProtos.getDescriptor();
  }

  // @@protoc_insertion_point(outer_class_scope)
}