// TestRpcServiceProtosLegacy.java

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: test_rpc_service_legacy.proto

package org.apache.hadoop.ipc.protobuf;

public final class TestRpcServiceProtosLegacy {
  private TestRpcServiceProtosLegacy() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Protobuf service {@code hadoop.common.TestProtobufRpcProto}
   *
   * <pre>
   **
   * A protobuf service for use in tests
   * </pre>
   */
  public static abstract class TestProtobufRpcProto
      implements com.google.protobuf.Service {
    protected TestProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

      /**
       * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void error2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void slowPing(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
       */
      public abstract void echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done);

      /**
       * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
       */
      public abstract void add(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);

      /**
       * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
       */
      public abstract void add2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);

      /**
       * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void testServerGet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
       */
      public abstract void exchange(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done);

      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void lockAndSleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
       */
      public abstract void getAuthMethod(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done);

      /**
       * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getAuthUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

      /**
       * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echoPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

      /**
       * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void sendPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getCurrentUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

      /**
       * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
       */
      public abstract void getServerRemoteUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
          impl.echo(controller, request, done);
        }

        @java.lang.Override
        public  void error(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.error(controller, request, done);
        }

        @java.lang.Override
        public  void error2(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.error2(controller, request, done);
        }

        @java.lang.Override
        public  void slowPing(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.slowPing(controller, request, done);
        }

        @java.lang.Override
        public  void echo2(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done) {
          impl.echo2(controller, request, done);
        }

        @java.lang.Override
        public  void add(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
          impl.add(controller, request, done);
        }

        @java.lang.Override
        public  void add2(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
          impl.add2(controller, request, done);
        }

        @java.lang.Override
        public  void testServerGet(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.testServerGet(controller, request, done);
        }

        @java.lang.Override
        public  void exchange(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done) {
          impl.exchange(controller, request, done);
        }

        @java.lang.Override
        public  void sleep(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.sleep(controller, request, done);
        }

        @java.lang.Override
        public  void lockAndSleep(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.lockAndSleep(controller, request, done);
        }

        @java.lang.Override
        public  void getAuthMethod(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done) {
          impl.getAuthMethod(controller, request, done);
        }

        @java.lang.Override
        public  void getAuthUser(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
          impl.getAuthUser(controller, request, done);
        }

        @java.lang.Override
        public  void echoPostponed(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
          impl.echoPostponed(controller, request, done);
        }

        @java.lang.Override
        public  void sendPostponed(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.sendPostponed(controller, request, done);
        }

        @java.lang.Override
        public  void getCurrentUser(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
          impl.getCurrentUser(controller, request, done);
        }

        @java.lang.Override
        public  void getServerRemoteUser(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
          impl.getServerRemoteUser(controller, request, done);
        }

      };
    }
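
    // Illustrative sketch only, not part of the generated code: given a hypothetical
    // Interface implementation supplied by test code, newReflectiveService() wraps it
    // as a generic Service whose callMethod() dispatches by method index. This helper
    // (exampleAsyncDispatch) is hypothetical and shows one call going through that
    // dispatch path; index 0 corresponds to ping().
    private static void exampleAsyncDispatch(final Interface impl,
        com.google.protobuf.RpcController controller) {
      com.google.protobuf.Service service = newReflectiveService(impl);
      service.callMethod(
          getDescriptor().getMethods().get(0),  // index 0 = ping
          controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance(),
          new com.google.protobuf.RpcCallback<com.google.protobuf.Message>() {
            @java.lang.Override
            public void run(com.google.protobuf.Message response) {
              // Receives the EmptyResponseProto that impl.ping() passed to its callback.
            }
          });
    }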

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
            case 2:
              return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 3:
              return impl.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 4:
              return impl.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request);
            case 5:
              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request);
            case 6:
              return impl.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request);
            case 7:
              return impl.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request);
            case 8:
              return impl.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 9:
              return impl.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request);
            case 10:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
            case 11:
              return impl.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
            case 12:
              return impl.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 13:
              return impl.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 14:
              return impl.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
            case 15:
              return impl.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 16:
              return impl.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 17:
              return impl.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
            case 6:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
            case 8:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 14:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
            case 15:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 16:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 17:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 4:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 5:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
            case 6:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
            case 7:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
            case 8:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 9:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
            case 10:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 11:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 12:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
            case 13:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
            case 14:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
            case 15:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 16:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
            case 17:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
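
    // Illustrative sketch only, not part of the generated code: given a hypothetical
    // BlockingInterface implementation, newReflectiveBlockingService() exposes it as a
    // BlockingService whose callBlockingMethod() routes each MethodDescriptor (by index)
    // to the matching synchronous method and returns its response directly. This helper
    // (exampleBlockingDispatch) is hypothetical and mirrors what an RPC server does when
    // a request arrives.
    private static com.google.protobuf.Message exampleBlockingDispatch(
        BlockingInterface impl,
        com.google.protobuf.RpcController controller)
        throws com.google.protobuf.ServiceException {
      com.google.protobuf.BlockingService service = newReflectiveBlockingService(impl);
      // Method index 0 is ping(EmptyRequestProto) -> EmptyResponseProto.
      return service.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance());
    }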

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echo(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

    /**
     * <code>rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void error(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void error2(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void slowPing(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);</code>
     */
    public abstract void echo2(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done);

    /**
     * <code>rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);</code>
     */
    public abstract void add(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);

    /**
     * <code>rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);</code>
     */
    public abstract void add2(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done);

    /**
     * <code>rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void testServerGet(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);</code>
     */
    public abstract void exchange(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done);

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void sleep(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void lockAndSleep(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);</code>
     */
    public abstract void getAuthMethod(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done);

    /**
     * <code>rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getAuthUser(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

    /**
     * <code>rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echoPostponed(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

    /**
     * <code>rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void sendPostponed(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getCurrentUser(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

    /**
     * <code>rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);</code>
     */
    public abstract void getServerRemoteUser(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(0);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 4:
          this.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 5:
          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2>specializeCallback(
              done));
          return;
        case 6:
          this.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto>specializeCallback(
              done));
          return;
        case 7:
          this.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto>specializeCallback(
              done));
          return;
        case 8:
          this.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 9:
          this.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto>specializeCallback(
              done));
          return;
        case 10:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 11:
          this.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 12:
          this.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto>specializeCallback(
              done));
          return;
        case 13:
          this.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
              done));
          return;
        case 14:
          this.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
              done));
          return;
        case 15:
          this.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 16:
          this.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
              done));
          return;
        case 17:
          this.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
        case 6:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
        case 8:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 14:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
        case 15:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 16:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 17:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 4:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 5:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
        case 6:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
        case 7:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
        case 8:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 9:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
        case 10:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 11:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 12:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
        case 13:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
        case 14:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
        case 15:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 16:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
        case 17:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }
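
    // Illustrative sketch only, not part of the generated code: exercises the asynchronous
    // client path. The RpcChannel and RpcController are assumed to come from the RPC layer
    // under test; this helper (examplePing) is hypothetical. The Stub forwards the call over
    // the channel and the callback runs once the remote ping() completes.
    private static void examplePing(com.google.protobuf.RpcChannel channel,
        com.google.protobuf.RpcController controller) {
      Stub stub = newStub(channel);
      stub.ping(controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance(),
          new com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>() {
            @java.lang.Override
            public void run(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto response) {
              // Handle the ping response delivered by the channel.
            }
          });
    }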

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpcProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
      }

      public  void error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void error2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void slowPing(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance()));
      }

      public  void add(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
      }

      public  void add2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
      }

      public  void testServerGet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void exchange(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance()));
      }

      public  void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void lockAndSleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void getAuthMethod(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance()));
      }

      public  void getAuthUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
      }

      public  void echoPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
      }

      public  void sendPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void getCurrentUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
      }

      public  void getServerRemoteUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(4),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) channel.callBlockingMethod(
          getDescriptor().getMethods().get(5),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(6),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(7),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(8),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(9),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(10),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(11),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(12),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(13),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(14),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(15),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(16),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(17),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcProto)
  }
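
  // Usage sketch (illustrative only, not produced by the protocol buffer
  // compiler): a caller that already holds a com.google.protobuf.BlockingRpcChannel
  // for this service (how that channel is obtained is assumed here) can create a
  // blocking stub and invoke one of the methods declared in BlockingInterface:
  //
  //   com.google.protobuf.BlockingRpcChannel channel = ...; // assumed to exist
  //   TestProtobufRpcProto.BlockingInterface stub =
  //       TestProtobufRpcProto.newBlockingStub(channel);
  //   TestProtosLegacy.EmptyResponseProto response =
  //       stub.ping(null /* RpcController; passing null is an assumption */,
  //           TestProtosLegacy.EmptyRequestProto.getDefaultInstance());
  //
  // The call is routed through BlockingStub.ping above, which dispatches
  // getDescriptor().getMethods().get(0) on the underlying channel.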

  /**
   * Protobuf service {@code hadoop.common.TestProtobufRpc2Proto}
   */
  public static abstract class TestProtobufRpc2Proto
      implements com.google.protobuf.Service {
    protected TestProtobufRpc2Proto() {}

    public interface Interface {
      /**
       * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
       */
      public abstract void echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
       */
      public abstract void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpc2Proto() {
        @java.lang.Override
        public  void ping2(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping2(controller, request, done);
        }

        @java.lang.Override
        public  void echo2(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
          impl.echo2(controller, request, done);
        }

        @java.lang.Override
        public  void sleep(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done) {
          impl.sleep(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 1:
              return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
            case 2:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping2(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);</code>
     */
    public abstract void echo2(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done);

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);</code>
     */
    public abstract void sleep(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(1);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpc2Proto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
      }

      public  void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpc2Proto)
  }
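
  // Usage sketch for the non-blocking side (illustrative only, not generated):
  // given a com.google.protobuf.RpcChannel (its construction is assumed), the
  // asynchronous Stub created by newStub delivers the response through an
  // RpcCallback rather than as a return value:
  //
  //   com.google.protobuf.RpcChannel channel = ...; // assumed to exist
  //   TestProtobufRpc2Proto.Stub stub = TestProtobufRpc2Proto.newStub(channel);
  //   stub.ping2(null /* controller; null assumed acceptable */,
  //       TestProtosLegacy.EmptyRequestProto.getDefaultInstance(),
  //       new com.google.protobuf.RpcCallback<TestProtosLegacy.EmptyResponseProto>() {
  //         @Override
  //         public void run(TestProtosLegacy.EmptyResponseProto response) {
  //           // handle the asynchronous response here
  //         }
  //       });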

  /**
   * Protobuf service {@code hadoop.common.OldProtobufRpcProto}
   */
  public static abstract class OldProtobufRpcProto
      implements com.google.protobuf.Service {
    protected OldProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new OldProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void echo(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(2);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.OldProtobufRpcProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.OldProtobufRpcProto)
  }
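
  // Server-side sketch (illustrative only, not generated): an implementation of
  // OldProtobufRpcProto.BlockingInterface can be wrapped into a
  // com.google.protobuf.BlockingService via newReflectiveBlockingService. The
  // trivial bodies below are placeholders, not the behaviour of any real test
  // server:
  //
  //   OldProtobufRpcProto.BlockingInterface impl =
  //       new OldProtobufRpcProto.BlockingInterface() {
  //         @Override
  //         public TestProtosLegacy.EmptyResponseProto ping(
  //             com.google.protobuf.RpcController controller,
  //             TestProtosLegacy.EmptyRequestProto request) {
  //           return TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
  //         }
  //         @Override
  //         public TestProtosLegacy.EmptyResponseProto echo(
  //             com.google.protobuf.RpcController controller,
  //             TestProtosLegacy.EmptyRequestProto request) {
  //           return TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
  //         }
  //       };
  //   com.google.protobuf.BlockingService service =
  //       OldProtobufRpcProto.newReflectiveBlockingService(impl);
  //
  // The anonymous BlockingService returned above dispatches callBlockingMethod
  // by method index (0 = ping, 1 = echo), as shown in newReflectiveBlockingService.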

  /**
   * Protobuf service {@code hadoop.common.NewProtobufRpcProto}
   */
  public static abstract class NewProtobufRpcProto
      implements com.google.protobuf.Service {
    protected NewProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new NewProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);</code>
     */
    public abstract void echo(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(3);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewProtobufRpcProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.NewProtobufRpcProto)
  }
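
  // Illustrative helper, not emitted by protoc: sketches how a caller might use
  // the non-blocking NewProtobufRpcProto stub defined above.  The RpcChannel and
  // the empty callback body are assumptions supplied only for this sketch.
  static void exampleNewEchoAsync(com.google.protobuf.RpcChannel channel) {
    NewProtobufRpcProto.Interface client = NewProtobufRpcProto.newStub(channel);
    client.echo(
        null,  // no RpcController in this sketch
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance(),
        new com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto>() {
          public void run(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto response) {
            // Response handling is test-specific; nothing to do in this sketch.
          }
        });
  }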

  /**
   * Protobuf service {@code hadoop.common.NewerProtobufRpcProto}
   */
  public static abstract class NewerProtobufRpcProto
      implements com.google.protobuf.Service {
    protected NewerProtobufRpcProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

      /**
       * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new NewerProtobufRpcProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

        @java.lang.Override
        public  void echo(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.echo(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            case 1:
              return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    /**
     * <code>rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void echo(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(4);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewerProtobufRpcProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }

      public  void echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }


      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.NewerProtobufRpcProto)
  }
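
  // Illustrative helper, not emitted by protoc: shows the blocking client path
  // for NewerProtobufRpcProto.  The BlockingRpcChannel is assumed to be supplied
  // by the surrounding test harness; a null controller is used for brevity.
  static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto
      exampleNewerPingBlocking(com.google.protobuf.BlockingRpcChannel channel)
      throws com.google.protobuf.ServiceException {
    NewerProtobufRpcProto.BlockingInterface client =
        NewerProtobufRpcProto.newBlockingStub(channel);
    return client.ping(
        null,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance());
  }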

  /**
   * Protobuf service {@code hadoop.common.CustomProto}
   */
  public static abstract class CustomProto
      implements com.google.protobuf.Service {
    protected CustomProto() {}

    public interface Interface {
      /**
       * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
       */
      public abstract void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new CustomProto() {
        @java.lang.Override
        public  void ping(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
          impl.ping(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);</code>
     */
    public abstract void ping(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(5);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.CustomProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.CustomProto)
  }
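
  // Illustrative helper, not emitted by protoc: wraps a no-op CustomProto
  // implementation as a BlockingService, the form an RPC server engine would
  // typically register.  The trivial ping body is an assumption for the sketch.
  static com.google.protobuf.BlockingService exampleCustomProtoService() {
    return CustomProto.newReflectiveBlockingService(new CustomProto.BlockingInterface() {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request) {
        // A real test implementation would do work here; the sketch just acks.
        return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
      }
    });
  }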

  /**
   * Protobuf service {@code hadoop.common.TestProtobufRpcHandoffProto}
   */
  public static abstract class TestProtobufRpcHandoffProto
      implements com.google.protobuf.Service {
    protected TestProtobufRpcHandoffProto() {}

    public interface Interface {
      /**
       * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
       */
      public abstract void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done);

    }

    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new TestProtobufRpcHandoffProto() {
        @java.lang.Override
        public  void sleep(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done) {
          impl.sleep(controller, request, done);
        }

      };
    }

    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }

    /**
     * <code>rpc sleep(.hadoop.common.SleepRequestProto2) returns (.hadoop.common.SleepResponseProto2);</code>
     */
    public abstract void sleep(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done);

    public static final
        com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(6);
    }
    public final com.google.protobuf.Descriptors.ServiceDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }

    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpcHandoffProto implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class,
            org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance()));
      }
    }

    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    public interface BlockingInterface {
      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request)
          throws com.google.protobuf.ServiceException;
    }

    private static final class BlockingStub implements BlockingInterface {
      private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.BlockingRpcChannel channel;

      public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 sleep(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2 request)
          throws com.google.protobuf.ServiceException {
        return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) channel.callBlockingMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance());
      }

    }

    // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcHandoffProto)
  }
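
  // Illustrative helper, not emitted by protoc: invokes the handoff service's
  // sleep RPC through its blocking stub.  The request uses the default instance
  // purely for illustration; real tests would populate it via its builder.
  static org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2
      exampleHandoffSleep(com.google.protobuf.BlockingRpcChannel channel)
      throws com.google.protobuf.ServiceException {
    TestProtobufRpcHandoffProto.BlockingInterface client =
        TestProtobufRpcHandoffProto.newBlockingStub(channel);
    return client.sleep(
        null,
        org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto2.getDefaultInstance());
  }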


  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    java.lang.String[] descriptorData = {
      "\n\035test_rpc_service_legacy.proto\022\rhadoop." +
      "common\032\021test_legacy.proto2\330\013\n\024TestProtob" +
      "ufRpcProto\022K\n\004ping\022 .hadoop.common.Empty" +
      "RequestProto\032!.hadoop.common.EmptyRespon" +
      "seProto\022I\n\004echo\022\037.hadoop.common.EchoRequ" +
      "estProto\032 .hadoop.common.EchoResponsePro" +
      "to\022L\n\005error\022 .hadoop.common.EmptyRequest" +
      "Proto\032!.hadoop.common.EmptyResponseProto" +
      "\022M\n\006error2\022 .hadoop.common.EmptyRequestP" +
      "roto\032!.hadoop.common.EmptyResponseProto\022",
      "R\n\010slowPing\022#.hadoop.common.SlowPingRequ" +
      "estProto\032!.hadoop.common.EmptyResponsePr" +
      "oto\022L\n\005echo2\022 .hadoop.common.EchoRequest" +
      "Proto2\032!.hadoop.common.EchoResponseProto" +
      "2\022F\n\003add\022\036.hadoop.common.AddRequestProto" +
      "\032\037.hadoop.common.AddResponseProto\022H\n\004add" +
      "2\022\037.hadoop.common.AddRequestProto2\032\037.had" +
      "oop.common.AddResponseProto\022T\n\rtestServe" +
      "rGet\022 .hadoop.common.EmptyRequestProto\032!" +
      ".hadoop.common.EmptyResponseProto\022U\n\010exc",
      "hange\022#.hadoop.common.ExchangeRequestPro" +
      "to\032$.hadoop.common.ExchangeResponseProto" +
      "\022L\n\005sleep\022 .hadoop.common.SleepRequestPr" +
      "oto\032!.hadoop.common.EmptyResponseProto\022S" +
      "\n\014lockAndSleep\022 .hadoop.common.SleepRequ" +
      "estProto\032!.hadoop.common.EmptyResponsePr" +
      "oto\022Y\n\rgetAuthMethod\022 .hadoop.common.Emp" +
      "tyRequestProto\032&.hadoop.common.AuthMetho" +
      "dResponseProto\022Q\n\013getAuthUser\022 .hadoop.c" +
      "ommon.EmptyRequestProto\032 .hadoop.common.",
      "UserResponseProto\022R\n\rechoPostponed\022\037.had" +
      "oop.common.EchoRequestProto\032 .hadoop.com" +
      "mon.EchoResponseProto\022T\n\rsendPostponed\022 " +
      ".hadoop.common.EmptyRequestProto\032!.hadoo" +
      "p.common.EmptyResponseProto\022T\n\016getCurren" +
      "tUser\022 .hadoop.common.EmptyRequestProto\032" +
      " .hadoop.common.UserResponseProto\022Y\n\023get" +
      "ServerRemoteUser\022 .hadoop.common.EmptyRe" +
      "questProto\032 .hadoop.common.UserResponseP" +
      "roto2\377\001\n\025TestProtobufRpc2Proto\022L\n\005ping2\022",
      " .hadoop.common.EmptyRequestProto\032!.hado" +
      "op.common.EmptyResponseProto\022J\n\005echo2\022\037." +
      "hadoop.common.EchoRequestProto\032 .hadoop." +
      "common.EchoResponseProto\022L\n\005sleep\022 .hado" +
      "op.common.SleepRequestProto\032!.hadoop.com" +
      "mon.SleepResponseProto2\257\001\n\023OldProtobufRp" +
      "cProto\022K\n\004ping\022 .hadoop.common.EmptyRequ" +
      "estProto\032!.hadoop.common.EmptyResponsePr" +
      "oto\022K\n\004echo\022 .hadoop.common.EmptyRequest" +
      "Proto\032!.hadoop.common.EmptyResponseProto",
      "2\253\001\n\023NewProtobufRpcProto\022K\n\004ping\022 .hadoo" +
      "p.common.EmptyRequestProto\032!.hadoop.comm" +
      "on.EmptyResponseProto\022G\n\004echo\022\036.hadoop.c" +
      "ommon.OptRequestProto\032\037.hadoop.common.Op" +
      "tResponseProto2\261\001\n\025NewerProtobufRpcProto" +
      "\022K\n\004ping\022 .hadoop.common.EmptyRequestPro" +
      "to\032!.hadoop.common.EmptyResponseProto\022K\n" +
      "\004echo\022 .hadoop.common.EmptyRequestProto\032" +
      "!.hadoop.common.EmptyResponseProto2Z\n\013Cu" +
      "stomProto\022K\n\004ping\022 .hadoop.common.EmptyR",
      "equestProto\032!.hadoop.common.EmptyRespons" +
      "eProto2m\n\033TestProtobufRpcHandoffProto\022N\n" +
      "\005sleep\022!.hadoop.common.SleepRequestProto" +
      "2\032\".hadoop.common.SleepResponseProto2BB\n" +
      "\036org.apache.hadoop.ipc.protobufB\032TestRpc" +
      "ServiceProtosLegacy\210\001\001\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.ipc.protobuf.TestProtosLegacy.getDescriptor(),
        }, assigner);
  }

  // @@protoc_insertion_point(outer_class_scope)
}