Example #1
  def set_up_implementation(
      self,
      name,
      methods,
      inline_value_in_value_out_methods,
      inline_value_in_stream_out_methods,
      inline_stream_in_value_out_methods,
      inline_stream_in_stream_out_methods,
      event_value_in_value_out_methods,
      event_value_in_stream_out_methods,
      event_stream_in_value_out_methods,
      event_stream_in_stream_out_methods,
      multi_method):
    servicer_pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)
    stub_pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)

    servicer = implementations.servicer(
        servicer_pool,
        inline_value_in_value_out_methods=inline_value_in_value_out_methods,
        inline_value_in_stream_out_methods=inline_value_in_stream_out_methods,
        inline_stream_in_value_out_methods=inline_stream_in_value_out_methods,
        inline_stream_in_stream_out_methods=inline_stream_in_stream_out_methods,
        event_value_in_value_out_methods=event_value_in_value_out_methods,
        event_value_in_stream_out_methods=event_value_in_stream_out_methods,
        event_stream_in_value_out_methods=event_stream_in_value_out_methods,
        event_stream_in_stream_out_methods=event_stream_in_stream_out_methods,
        multi_method=multi_method)

    linked_pair = base_util.linked_pair(servicer, _TIMEOUT)
    server = implementations.server()
    stub = implementations.stub(linked_pair.front, stub_pool)
    return server, stub, (servicer_pool, stub_pool, linked_pair)
Example #2
  def set_up_implementation(
      self, name, methods, method_implementations,
      multi_method_implementation):
    servicer_pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)
    stub_pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)

    servicer = implementations.servicer(
        servicer_pool, method_implementations, multi_method_implementation)

    linked_pair = base_util.linked_pair(servicer, _TIMEOUT)
    stub = implementations.generic_stub(linked_pair.front, stub_pool)
    return stub, (servicer_pool, stub_pool, linked_pair)
Example #3
 def __enter__(self):
     with self._lock:
         if self._pool is None:
             self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
             self._front = _base_implementations.front_link(self._pool, self._pool, self._pool)
             self._rear_link = _rear.RearLink(
                 self._host,
                 self._port,
                 self._pool,
                 self._breakdown.request_serializers,
                 self._breakdown.response_deserializers,
                 self._secure,
                 self._root_certificates,
                 self._private_key,
                 self._certificate_chain,
                 metadata_transformer=self._metadata_transformer,
                 server_host_override=self._server_host_override,
             )
             self._front.join_rear_link(self._rear_link)
             self._rear_link.join_fore_link(self._front)
             self._rear_link.start()
             self._understub = _face_implementations.dynamic_stub(
                 self._breakdown.face_cardinalities, self._front, self._pool, ""
             )
         else:
             raise ValueError("Tried to __enter__ already-__enter__ed Stub!")
     return self
Example #4
  def set_up_implementation(
      self, name, methods, method_implementations,
      multi_method_implementation):
    pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)

    servicer = face_implementations.servicer(
        pool, method_implementations, multi_method_implementation)

    serialization = serial.serialization(methods)

    fore_link = fore.ForeLink(
        pool, serialization.request_deserializers,
        serialization.response_serializers, None, ())
    fore_link.start()
    port = fore_link.port()
    rear_link = rear.RearLink(
        'localhost', port, pool,
        serialization.request_serializers,
        serialization.response_deserializers, False, None, None, None)
    rear_link.start()
    front = base_implementations.front_link(pool, pool, pool)
    back = base_implementations.back_link(
        servicer, pool, pool, pool, _TIMEOUT, _MAXIMUM_TIMEOUT)
    fore_link.join_rear_link(back)
    back.join_fore_link(fore_link)
    rear_link.join_fore_link(front)
    front.join_rear_link(rear_link)

    stub = face_implementations.generic_stub(front, pool)
    return stub, (rear_link, fore_link, front, back)
Example #5
    def test_reconnect(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        handler = grpc.method_handlers_generic_handler('test', {
            'UnaryUnary':
            grpc.unary_unary_rpc_method_handler(_handle_unary_unary)
        })
        sock_opt = _get_reuse_socket_option()
        port = _pick_and_bind_port(sock_opt)
        self.assertIsNotNone(port)

        server = grpc.server(server_pool, (handler,))
        server.add_insecure_port('[::]:{}'.format(port))
        server.start()
        channel = grpc.insecure_channel('localhost:%d' % port)
        multi_callable = channel.unary_unary(_UNARY_UNARY)
        self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
        server.stop(None)
        # By default, the channel connectivity is checked every 5s
        # GRPC_CLIENT_CHANNEL_BACKUP_POLL_INTERVAL_MS can be set to change
        # this.
        time.sleep(5.1)
        server = grpc.server(server_pool, (handler,))
        server.add_insecure_port('[::]:{}'.format(port))
        server.start()
        self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
        server.stop(None)
        channel.close()
Example #6
    def testTaskExecuted(self):
        test_list = []

        with logging_pool.pool(_POOL_SIZE) as pool:
            pool.submit(lambda: test_list.append(object())).result()

        self.assertTrue(test_list)
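
For orientation, here is a minimal sketch of the API all of these examples exercise, assuming gRPC Python's grpc.framework.foundation.logging_pool: pool(max_workers) returns a thread pool that behaves like a concurrent.futures.ThreadPoolExecutor (context manager, submit(), futures) and additionally logs exceptions raised by submitted tasks. The pool size and the submitted callable below are illustrative, not taken from any example.

# Minimal sketch, assuming grpc.framework.foundation.logging_pool.
from grpc.framework.foundation import logging_pool

_POOL_SIZE = 4  # illustrative worker count

with logging_pool.pool(_POOL_SIZE) as pool:
    # submit() mirrors ThreadPoolExecutor.submit and returns a Future.
    future = pool.submit(lambda value: value * 2, 21)
    assert future.result() == 42
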
Example #7
  def testWaitingForSomeButNotAllConcurrentFutureInvocations(self):
    pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    request = b'\x67\x68'
    expected_response = self._handler.handle_unary_unary(request, None)
    response_futures = [None] * test_constants.THREAD_CONCURRENCY
    lock = threading.Lock()
    test_is_running_cell = [True]
    def wrap_future(future):
      def wrap():
        try:
          return future.result()
        except grpc.RpcError:
          with lock:
            if test_is_running_cell[0]:
              raise
          return None
      return wrap

    multi_callable = _unary_unary_multi_callable(self._channel)
    for index in range(test_constants.THREAD_CONCURRENCY):
      inner_response_future = multi_callable.future(
          request,
          metadata=(
              (b'test',
               b'WaitingForSomeButNotAllConcurrentFutureInvocations'),))
      outer_response_future = pool.submit(wrap_future(inner_response_future))
      response_futures[index] = outer_response_future

    some_completed_response_futures_iterator = itertools.islice(
        futures.as_completed(response_futures),
        test_constants.THREAD_CONCURRENCY // 2)
    for response_future in some_completed_response_futures_iterator:
      self.assertEqual(expected_response, response_future.result())
    with lock:
      test_is_running_cell[0] = False
Example #8
    def setUp(self):
        self._servicer = _Servicer()
        self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        self._server = grpc.server(
            self._server_pool, handlers=(_generic_handler(self._servicer),))
        port = self._server.add_insecure_port('[::]:0')
        self._server.start()

        channel = grpc.insecure_channel('localhost:{}'.format(port))
        self._unary_unary = channel.unary_unary(
            '/'.join((
                '',
                _SERVICE,
                _UNARY_UNARY,)),
            request_serializer=_REQUEST_SERIALIZER,
            response_deserializer=_RESPONSE_DESERIALIZER,)
        self._unary_stream = channel.unary_stream('/'.join((
            '',
            _SERVICE,
            _UNARY_STREAM,)),)
        self._stream_unary = channel.stream_unary('/'.join((
            '',
            _SERVICE,
            _STREAM_UNARY,)),)
        self._stream_stream = channel.stream_stream(
            '/'.join((
                '',
                _SERVICE,
                _STREAM_STREAM,)),
            request_serializer=_REQUEST_SERIALIZER,
            response_deserializer=_RESPONSE_DESERIALIZER,)
Example #9
 def testCallableObjectExecuted(self):
     callable_object = _CallableObject()
     passed_object = object()
     with logging_pool.pool(_POOL_SIZE) as pool:
         future = pool.submit(callable_object, passed_object)
     self.assertIsNone(future.result())
     self.assertSequenceEqual((passed_object,), callable_object.passed_values())
Example #10
  def instantiate(
      self, methods, method_implementations, multi_method_implementation):
    pool = logging_pool.pool(test_constants.POOL_SIZE)
    servicer = crust_implementations.servicer(
        method_implementations, multi_method_implementation, pool)

    service_end_link = core_implementations.service_end_link(
        servicer, test_constants.DEFAULT_TIMEOUT,
        test_constants.MAXIMUM_TIMEOUT)
    invocation_end_link = core_implementations.invocation_end_link()
    invocation_end_link.join_link(service_end_link)
    service_end_link.join_link(invocation_end_link)
    service_end_link.start()
    invocation_end_link.start()

    generic_stub = crust_implementations.generic_stub(invocation_end_link, pool)
    # TODO(nathaniel): Add a "groups" attribute to _digest.TestServiceDigest.
    group = next(iter(methods))[0]
    # TODO(nathaniel): Add a "cardinalities_by_group" attribute to
    # _digest.TestServiceDigest.
    cardinalities = {
        method: method_object.cardinality()
        for (group, method), method_object in methods.iteritems()}
    dynamic_stub = crust_implementations.dynamic_stub(
        invocation_end_link, group, cardinalities, pool)

    return generic_stub, {group: dynamic_stub}, (
        invocation_end_link, service_end_link, pool)
Example #11
 def test_shutdown_handler(self):
   server = grpc.server(logging_pool.pool(1))
   handler = _ShutDownHandler()
   server.add_shutdown_handler(handler.shutdown_handler)
   server.start()
   server.stop(0, shutdown_handler_grace=SHUTDOWN_GRACE).wait()
   self.assertEqual(SHUTDOWN_GRACE, handler.seen_handler_grace)
Example #12
 def setUp(self):
     self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
     self._server = grpc.server(
         self._server_pool, handlers=(_GenericHandler(),))
     port = self._server.add_insecure_port('[::]:0')
     self._server.start()
     self._channel = grpc.insecure_channel('localhost:%d' % port)
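
Most of the setUp examples in this listing repeat one server-side pattern; the sketch below condenses it. The handler argument and worker count are placeholders rather than values from any single example: create a logging pool, hand it to grpc.server as the thread pool, bind an ephemeral insecure port, start the server, and open a channel against it.

# Condensed sketch of the recurring test-server setup; names are illustrative.
import grpc
from grpc.framework.foundation import logging_pool


def _start_test_server(handlers):
    """Starts a grpc.server backed by a logging pool; returns (server, channel)."""
    pool = logging_pool.pool(8)  # worker count chosen for illustration
    server = grpc.server(pool, handlers=handlers)
    port = server.add_insecure_port('[::]:0')  # port 0 binds an unused port
    server.start()
    channel = grpc.insecure_channel('localhost:{}'.format(port))
    return server, channel
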
Example #13
    def testSecureClientCert(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        handler = grpc.method_handlers_generic_handler('test', {
            'UnaryUnary':
            grpc.unary_unary_rpc_method_handler(handle_unary_unary)
        })
        server = grpc.server(server_pool, (handler,))
        server_cred = grpc.ssl_server_credentials(
            _SERVER_CERTS,
            root_certificates=_TEST_ROOT_CERTIFICATES,
            require_client_auth=True)
        port = server.add_secure_port('[::]:0', server_cred)
        server.start()

        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES,
            private_key=_PRIVATE_KEY,
            certificate_chain=_CERTIFICATE_CHAIN)
        channel = grpc.secure_channel(
            'localhost:{}'.format(port),
            channel_creds,
            options=_PROPERTY_OPTIONS)

        response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        server.stop(None)

        auth_data = pickle.loads(response)
        auth_ctx = auth_data[_AUTH_CTX]
        six.assertCountEqual(self, _CLIENT_IDS, auth_data[_ID])
        self.assertEqual('x509_subject_alternative_name', auth_data[_ID_KEY])
        self.assertSequenceEqual([b'ssl'], auth_ctx['transport_security_type'])
        self.assertSequenceEqual([b'*.test.google.com'],
                                 auth_ctx['x509_common_name'])
Example #14
    def testSecureNoCert(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        handler = grpc.method_handlers_generic_handler('test', {
            'UnaryUnary':
            grpc.unary_unary_rpc_method_handler(handle_unary_unary)
        })
        server = grpc.server(server_pool, (handler,))
        server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
        port = server.add_secure_port('[::]:0', server_cred)
        server.start()

        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES)
        channel = grpc.secure_channel(
            'localhost:{}'.format(port),
            channel_creds,
            options=_PROPERTY_OPTIONS)
        response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        server.stop(None)

        auth_data = pickle.loads(response)
        self.assertIsNone(auth_data[_ID])
        self.assertIsNone(auth_data[_ID_KEY])
        self.assertDictEqual({
            'transport_security_type': [b'ssl']
        }, auth_data[_AUTH_CTX])
Example #15
 def start(self):
   """See base.End.start for specification."""
   with self._lock:
     if self._cycle is not None:
       raise ValueError('Tried to start a not-stopped End!')
     else:
       self._cycle = _Cycle(logging_pool.pool(1))
Example #16
 def setUp(self):
   if self._synchronicity_variation:
     self._pool = logging_pool.pool(test_constants.POOL_SIZE)
   else:
     self._pool = None
   self._controller = self._controller_creator.controller(
       self._implementation, self._randomness)
Example #17
    def testCancelManyCalls(self):
        server_thread_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)

        server_completion_queue = cygrpc.CompletionQueue()
        server = cygrpc.Server(cygrpc.ChannelArgs([]))
        server.register_completion_queue(server_completion_queue)
        port = server.add_http2_port(b"[::]:0")
        server.start()
        channel = cygrpc.Channel("localhost:{}".format(port).encode(), cygrpc.ChannelArgs([]))

        state = _State()

        server_thread_args = (state, server, server_completion_queue, server_thread_pool)
        server_thread = threading.Thread(target=_serve, args=server_thread_args)
        server_thread.start()

        client_condition = threading.Condition()
        client_due = set()
        client_completion_queue = cygrpc.CompletionQueue()
        client_driver = _QueueDriver(client_condition, client_completion_queue, client_due)
        client_driver.start()

        with client_condition:
            client_calls = []
            for index in range(test_constants.RPC_CONCURRENCY):
                client_call = channel.create_call(
                    None, _EMPTY_FLAGS, client_completion_queue, b"/twinkies", None, _INFINITE_FUTURE
                )
                operations = (
                    cygrpc.operation_send_initial_metadata(_EMPTY_METADATA, _EMPTY_FLAGS),
                    cygrpc.operation_send_message(b"\x45\x56", _EMPTY_FLAGS),
                    cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),
                    cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),
                    cygrpc.operation_receive_message(_EMPTY_FLAGS),
                    cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),
                )
                tag = "client_complete_call_{0:04d}_tag".format(index)
                client_call.start_client_batch(cygrpc.Operations(operations), tag)
                client_due.add(tag)
                client_calls.append(client_call)

        with state.condition:
            while True:
                if state.parked_handlers < test_constants.THREAD_CONCURRENCY:
                    state.condition.wait()
                elif state.handled_rpcs < test_constants.RPC_CONCURRENCY:
                    state.condition.wait()
                else:
                    state.handlers_released = True
                    state.condition.notify_all()
                    break

        client_driver.events(test_constants.RPC_CONCURRENCY * _SUCCESS_CALL_FRACTION)
        with client_condition:
            for client_call in client_calls:
                client_call.cancel()

        with state.condition:
            server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG)
Example #18
 def setUp(self):
   self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
   self._server = grpc.server((_GenericHandler(weakref.proxy(self)),),
                              self._server_pool)
   port = self._server.add_insecure_port('[::]:0')
   self._server.start()
   self._channel = grpc.insecure_channel('localhost:%d' % port,
                                         options=_CHANNEL_ARGS)
Example #19
def linked_pair(servicer, default_timeout):
  """Creates a Server and Stub linked together for use."""
  link_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  front_work_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  front_transmission_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  front_utility_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  back_work_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  back_transmission_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  back_utility_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
  pools = (
      link_pool,
      front_work_pool, front_transmission_pool, front_utility_pool,
      back_work_pool, back_transmission_pool, back_utility_pool)

  link = in_memory.Link(link_pool)
  front = implementations.front_link(
      front_work_pool, front_transmission_pool, front_utility_pool)
  back = implementations.back_link(
      servicer, back_work_pool, back_transmission_pool, back_utility_pool,
      default_timeout, _MAXIMUM_TIMEOUT)
  front.join_rear_link(link)
  link.join_fore_link(front)
  back.join_fore_link(link)
  link.join_rear_link(back)

  return _LinkedPair(front, back, pools)
Example #20
    def setUp(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        self._server = grpc.server(server_pool)
        reflection.enable_server_reflection(_SERVICE_NAMES, self._server)
        port = self._server.add_insecure_port('[::]:0')
        self._server.start()

        channel = grpc.insecure_channel('localhost:%d' % port)
        self._stub = reflection_pb2_grpc.ServerReflectionStub(channel)
Example #21
 def __enter__(self):
   with self._lock:
     self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
     self._front = tickets_implementations.front(
         self._pool, self._pool, self._pool)
     self._rear_link.start()
     self._rear_link.join_fore_link(self._front)
     self._front.join_rear_link(self._rear_link)
     self._under_stub = face_implementations.stub(self._front, self._pool)
     return self  # __enter__ should return the stub so `with ... as stub:` yields it
Example #22
 def start(self):
   with self._lock:
     if self._server is None:
       self._completion_queue = _intermediary_low.CompletionQueue()
       self._server = _intermediary_low.Server(self._completion_queue)
     self._pool = logging_pool.pool(1)
     self._pool.submit(self._spin, self._completion_queue, self._server)
     self._server.start()
     self._server.service(None)
Example #23
File: fore.py Project: mindis/grpc
 def _start(self):
   with self._lock:
     self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
     self._fore_link = ForeLink(
         self._pool, self._request_deserializers, self._response_serializers,
         self._root_certificates, self._key_chain_pairs, port=self._port)
     self._fore_link.join_rear_link(self._rear_link)
     self._fore_link.start()
     return self
Example #24
 def _start(self):
   with self._lock:
     self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
     servicer = _servicer(self._implementations, self._pool)
     self._back = tickets_implementations.back(
         servicer, self._pool, self._pool, self._pool, _ONE_DAY_IN_SECONDS,
         _ONE_DAY_IN_SECONDS)
     self._fore_link.start()
     self._fore_link.join_rear_link(self._back)
     self._back.join_fore_link(self._fore_link)
Example #25
File: rear.py Project: mindis/grpc
 def _start(self):
   with self._lock:
     self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
     self._rear_link = RearLink(
         self._host, self._port, self._pool, self._request_serializers,
         self._response_deserializers, self._secure, self._root_certificates,
         self._private_key, self._certificate_chain)
     self._rear_link.join_fore_link(self._fore_link)
     self._rear_link.start()
   return self
Example #26
  def start(self):
    """Starts this object.

    This method must be called before attempting to exchange tickets with this
    object.
    """
    with self._lock:
      self._completion_queue = _intermediary_low.CompletionQueue()
      self._pool = logging_pool.pool(1)
      self._pool.submit(self._spin, self._completion_queue)
Example #27
  def setUp(self):
    servicer = reflection.ReflectionServicer(service_names=_SERVICE_NAMES)
    server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    self._server = grpc.server(server_pool)
    port = self._server.add_insecure_port('[::]:0')
    reflection_pb2.add_ServerReflectionServicer_to_server(servicer, self._server)
    self._server.start()

    channel = grpc.insecure_channel('localhost:%d' % port)
    self._stub = reflection_pb2.ServerReflectionStub(channel)
Example #28
 def setUp(self):
     self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
     self._trigger = _TestTrigger(test_constants.THREAD_CONCURRENCY)
     self._server = grpc.server(
         self._server_pool,
         handlers=(_GenericHandler(self._trigger),),
         maximum_concurrent_rpcs=test_constants.THREAD_CONCURRENCY)
     port = self._server.add_insecure_port('[::]:0')
     self._server.start()
     self._channel = grpc.insecure_channel('localhost:%d' % port)
Example #29
  def setUp(self):
    self._control = test_control.PauseFailControl()
    self._handler = _Handler(self._control)
    self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)

    self._server = grpc.server((), self._server_pool)
    port = self._server.add_insecure_port(b'[::]:0')
    self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
    self._server.start()

    self._channel = grpc.insecure_channel('localhost:%d' % port)
Example #30
 def __enter__(self):
   with self._lock:
     self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
     self._front = tickets_implementations.front(
         self._pool, self._pool, self._pool)
     self._rear_link.start()
     self._rear_link.join_fore_link(self._front)
     self._front.join_rear_link(self._rear_link)
     self._behaviors = _behaviors(
         self._implementations, self._front, self._pool)
     return self
Example #31
  def setUp(self):
    servicer = health.HealthServicer()
    servicer.set('', health_pb2.HealthCheckResponse.SERVING)
    servicer.set('grpc.test.TestServiceServing',
                 health_pb2.HealthCheckResponse.SERVING)
    servicer.set('grpc.test.TestServiceUnknown',
                 health_pb2.HealthCheckResponse.UNKNOWN)
    servicer.set('grpc.test.TestServiceNotServing',
                 health_pb2.HealthCheckResponse.NOT_SERVING)
    server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    self._server = grpc.server(server_pool)
    port = self._server.add_insecure_port('[::]:0')
    health_pb2.add_HealthServicer_to_server(servicer, self._server)
    self._server.start()

    channel = grpc.insecure_channel('localhost:%d' % port)
    self._stub = health_pb2.HealthStub(channel)
Example #32
 def test_reconnect(self):
     server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
     handler = grpc.method_handlers_generic_handler('test', {
         'UnaryUnary':
         grpc.unary_unary_rpc_method_handler(_handle_unary_unary)
     })
     server = grpc.server(server_pool, (handler, ))
     port = server.add_insecure_port('[::]:0')
     server.start()
     channel = grpc.insecure_channel('localhost:%d' % port)
     multi_callable = channel.unary_unary(_UNARY_UNARY)
     self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
     server.stop(None)
     server = grpc.server(server_pool, (handler, ))
     server.add_insecure_port('[::]:{}'.format(port))
     server.start()
     self.assertEqual(_RESPONSE, multi_callable(_REQUEST))
Example #33
def test_method_service(patch_grpc):
    def handler(request, context):
        return b""

    server = grpc.server(
        logging_pool.pool(1),
        options=(("grpc.so_reuseport", 0),),
    )
    port = server.add_insecure_port("[::]:0")
    channel = grpc.insecure_channel("[::]:{}".format(port))
    server.add_generic_rpc_handlers((_UnaryUnaryRpcHandler(handler),))
    try:
        server.start()
        channel.unary_unary("/Servicer/Handler")(b"request")
        channel.unary_unary("/pkg.Servicer/Handler")(b"request")
    finally:
        server.stop(None)
Example #34
  def __init__(self,
               ex_factory: executor_factory.ExecutorFactory,
               num_clients: int = 0):
    port = portpicker.pick_unused_port()
    self._server_pool = logging_pool.pool(max_workers=1)
    self._server = grpc.server(self._server_pool)
    self._server.add_insecure_port('[::]:{}'.format(port))
    self._service = executor_service.ExecutorService(ex_factory=ex_factory)
    executor_pb2_grpc.add_ExecutorServicer_to_server(self._service,
                                                     self._server)
    self._server.start()
    self._channel = grpc.insecure_channel('localhost:{}'.format(port))
    self._stub = executor_pb2_grpc.ExecutorStub(self._channel)

    serialized_cards = executor_serialization.serialize_cardinalities(
        {placement_literals.CLIENTS: num_clients})
    self._stub.SetCardinalities(
        executor_pb2.SetCardinalitiesRequest(cardinalities=serialized_cards))
Example #35
    def testInsecure(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        handler = grpc.method_handlers_generic_handler('test', {
            'UnaryUnary':
            grpc.unary_unary_rpc_method_handler(handle_unary_unary)
        })
        server = grpc.server(server_pool, (handler, ))
        port = server.add_insecure_port('[::]:0')
        server.start()

        channel = grpc.insecure_channel('localhost:%d' % port)
        response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        server.stop(None)

        auth_data = pickle.loads(response)
        self.assertIsNone(auth_data[_ID])
        self.assertIsNone(auth_data[_ID_KEY])
        self.assertDictEqual({}, auth_data[_AUTH_CTX])
Example #36
def server_and_stub(default_timeout,
                    inline_value_in_value_out_methods=None,
                    inline_value_in_stream_out_methods=None,
                    inline_stream_in_value_out_methods=None,
                    inline_stream_in_stream_out_methods=None,
                    event_value_in_value_out_methods=None,
                    event_value_in_stream_out_methods=None,
                    event_stream_in_value_out_methods=None,
                    event_stream_in_stream_out_methods=None,
                    multi_method=None):
    """Creates a Server and Stub linked together for use."""
    front_work_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    front_transmission_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    front_utility_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    back_work_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    back_transmission_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    back_utility_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    stub_pool = logging_pool.pool(_POOL_SIZE_LIMIT)
    pools = (front_work_pool, front_transmission_pool, front_utility_pool,
             back_work_pool, back_transmission_pool, back_utility_pool,
             stub_pool)

    servicer = implementations.servicer(
        back_work_pool,
        inline_value_in_value_out_methods=inline_value_in_value_out_methods,
        inline_value_in_stream_out_methods=inline_value_in_stream_out_methods,
        inline_stream_in_value_out_methods=inline_stream_in_value_out_methods,
        inline_stream_in_stream_out_methods=inline_stream_in_stream_out_methods,
        event_value_in_value_out_methods=event_value_in_value_out_methods,
        event_value_in_stream_out_methods=event_value_in_stream_out_methods,
        event_stream_in_value_out_methods=event_stream_in_value_out_methods,
        event_stream_in_stream_out_methods=event_stream_in_stream_out_methods,
        multi_method=multi_method)

    front = _tickets_implementations.front(front_work_pool,
                                           front_transmission_pool,
                                           front_utility_pool)
    back = _tickets_implementations.back(servicer, back_work_pool,
                                         back_transmission_pool,
                                         back_utility_pool, default_timeout,
                                         _MAXIMUM_TIMEOUT)
    front.join_rear_link(back)
    back.join_fore_link(front)

    stub = implementations.stub(front, stub_pool)

    return _LinkedPair(implementations.server(), stub, front, back, pools)
Example #37
  def setUp(self):
    """See unittest.TestCase.setUp for full specification.

    Overriding implementations must call this implementation.
    """
    self.control = control.PauseFailControl()
    self.digest_pool = logging_pool.pool(_MAXIMUM_POOL_SIZE)
    self.digest = digest.digest(
        stock_service.STOCK_TEST_SERVICE, self.control, self.digest_pool)

    self.server, self.stub, self.memo = self.set_up_implementation(
        self.digest.name, self.digest.methods,
        {}, {}, {}, {},
        self.digest.event_unary_unary_methods,
        self.digest.event_unary_stream_methods,
        self.digest.event_stream_unary_methods,
        self.digest.event_stream_stream_methods,
        None)
Example #38
 def setUp(self):
     self.memory_transmission_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.front_work_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.front_transmission_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.front_utility_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.back_work_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.back_transmission_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.back_utility_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.test_pool = logging_pool.pool(POOL_MAX_WORKERS)
     self.test_servicer = interfaces_test_case.TestServicer(self.test_pool)
     self.front = implementations.front_link(self.front_work_pool,
                                             self.front_transmission_pool,
                                             self.front_utility_pool)
     self.back = implementations.back_link(self.test_servicer,
                                           self.back_work_pool,
                                           self.back_transmission_pool,
                                           self.back_utility_pool,
                                           DEFAULT_TIMEOUT, MAXIMUM_TIMEOUT)
     self.front.join_rear_link(self.back)
     self.back.join_fore_link(self.front)
Example #39
  def instantiate(
      self, methods, method_implementations, multi_method_implementation):
    pool = logging_pool.pool(test_constants.POOL_SIZE)
    servicer = crust_implementations.servicer(
        method_implementations, multi_method_implementation, pool)
    serialization_behaviors = _serialization_behaviors_from_test_methods(
        methods)
    invocation_end_link = core_implementations.invocation_end_link()
    service_end_link = core_implementations.service_end_link(
        servicer, test_constants.DEFAULT_TIMEOUT,
        test_constants.MAXIMUM_TIMEOUT)
    service_grpc_link = service.service_link(
        serialization_behaviors.request_deserializers,
        serialization_behaviors.response_serializers)
    port = service_grpc_link.add_port('[::]:0', None)
    channel = _intermediary_low.Channel('localhost:%d' % port, None)
    invocation_grpc_link = invocation.invocation_link(
        channel, b'localhost', None,
        serialization_behaviors.request_serializers,
        serialization_behaviors.response_deserializers)

    invocation_end_link.join_link(invocation_grpc_link)
    invocation_grpc_link.join_link(invocation_end_link)
    service_grpc_link.join_link(service_end_link)
    service_end_link.join_link(service_grpc_link)
    service_end_link.start()
    invocation_end_link.start()
    invocation_grpc_link.start()
    service_grpc_link.start()

    generic_stub = crust_implementations.generic_stub(invocation_end_link, pool)
    # TODO(nathaniel): Add a "groups" attribute to _digest.TestServiceDigest.
    group = next(iter(methods))[0]
    # TODO(nathaniel): Add a "cardinalities_by_group" attribute to
    # _digest.TestServiceDigest.
    cardinalities = {
        method: method_object.cardinality()
        for (group, method), method_object in methods.iteritems()}
    dynamic_stub = crust_implementations.dynamic_stub(
        invocation_end_link, group, cardinalities, pool)

    return generic_stub, {group: dynamic_stub}, (
        invocation_end_link, invocation_grpc_link, service_grpc_link,
        service_end_link, pool)
Example #40
def test_context():
  port = portpicker.pick_unused_port()
  server_pool = logging_pool.pool(max_workers=1)
  server = grpc.server(server_pool)
  server.add_insecure_port('[::]:{}'.format(port))
  target_executor = eager_executor.EagerExecutor()
  service = executor_service.ExecutorService(target_executor)
  executor_pb2_grpc.add_ExecutorServicer_to_server(service, server)
  server.start()
  channel = grpc.insecure_channel('localhost:{}'.format(port))
  executor = remote_executor.RemoteExecutor(channel)
  set_default_executor.set_default_executor(executor)
  yield executor
  set_default_executor.set_default_executor()
  try:
    channel.close()
  except AttributeError:
    del channel
  server.stop(None)
Example #41
def _assemble(channel, host, metadata_transformer, request_serializers,
              response_deserializers, thread_pool, thread_pool_size):
    end_link = _core_implementations.invocation_end_link()
    grpc_link = invocation.invocation_link(channel, host, metadata_transformer,
                                           request_serializers,
                                           response_deserializers)
    if thread_pool is None:
        invocation_pool = logging_pool.pool(
            _DEFAULT_POOL_SIZE
            if thread_pool_size is None else thread_pool_size)
        assembly_pool = invocation_pool
    else:
        invocation_pool = thread_pool
        assembly_pool = None
    end_link.join_link(grpc_link)
    grpc_link.join_link(end_link)
    end_link.start()
    grpc_link.start()
    return end_link, grpc_link, invocation_pool, assembly_pool
Example #42
def create_and_start_server(port, servicer):
    """Create a server for pushing examples and starts it.


  Args:
    port: Port for the gRPC server to listen on.
    servicer: A PreprocessingServicer object.

  Returns:
    A grpc.Server object.
  """
    server = grpc.server(logging_pool.pool(max_workers=25))
    preprocessing_pb2_grpc.add_PreprocessingServicer_to_server(
        servicer, server)
    address = "[::]:%d" % port
    tf.logging.info("Create preprocessing server at %s", address)
    server.add_insecure_port(address)
    server.start()
    return server
Example #43
  def testConcurrentBlockingInvocations(self):
    pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    requests = tuple(b'\x07\x08' for _ in range(test_constants.STREAM_LENGTH))
    expected_response = self._handler.handle_stream_unary(iter(requests), None)
    expected_responses = [expected_response] * test_constants.THREAD_CONCURRENCY
    response_futures = [None] * test_constants.THREAD_CONCURRENCY

    multi_callable = _stream_unary_multi_callable(self._channel)
    for index in range(test_constants.THREAD_CONCURRENCY):
      request_iterator = iter(requests)
      response_future = pool.submit(
          multi_callable, request_iterator,
          metadata=(('test', 'ConcurrentBlockingInvocations'),))
      response_futures[index] = response_future
    responses = tuple(
        response_future.result() for response_future in response_futures)

    pool.shutdown(wait=True)
    self.assertSequenceEqual(expected_responses, responses)
Example #44
def test_context(rpc_mode='REQUEST_REPLY'):
    port = portpicker.pick_unused_port()
    server_pool = logging_pool.pool(max_workers=1)
    server = grpc.server(server_pool)
    server.add_insecure_port('[::]:{}'.format(port))
    target_factory = executor_stacks.local_executor_factory(num_clients=3)
    tracers = []

    def _tracer_fn(cardinalities):
        tracer = executor_test_utils.TracingExecutor(
            target_factory.create_executor(cardinalities))
        tracers.append(tracer)
        return tracer

    service = executor_service.ExecutorService(
        executor_stacks.ResourceManagingExecutorFactory(_tracer_fn))
    executor_pb2_grpc.add_ExecutorServicer_to_server(service, server)
    server.start()

    channel = grpc.insecure_channel('localhost:{}'.format(port))
    stub = executor_pb2_grpc.ExecutorStub(channel)
    serialized_cards = executor_service_utils.serialize_cardinalities(
        {placement_literals.CLIENTS: 3})
    stub.SetCardinalities(
        executor_pb2.SetCardinalitiesRequest(cardinalities=serialized_cards))

    remote_exec = remote_executor.RemoteExecutor(channel, rpc_mode)
    executor = reference_resolving_executor.ReferenceResolvingExecutor(
        remote_exec)
    try:
        yield collections.namedtuple('_', 'executor tracers')(executor,
                                                              tracers)
    finally:
        executor.close()
        for tracer in tracers:
            tracer.close()
        try:
            channel.close()
        except AttributeError:
            pass  # Public gRPC channel doesn't support close()
        finally:
            server.stop(None)
Example #45
    def setUp(self):
        self._servicer = _Servicer()
        self._server_pool = logging_pool.pool(
            test_constants.THREAD_CONCURRENCY)
        self._server = grpc.server(self._server_pool,
                                   handlers=(_generic_handler(
                                       self._servicer), ))
        port = self._server.add_insecure_port('[::]:0')
        self._server.start()

        channel = grpc.insecure_channel('localhost:{}'.format(port))
        self._unary_unary = channel.unary_unary(
            '/'.join((
                '',
                _SERVICE,
                _UNARY_UNARY,
            )),
            request_serializer=_REQUEST_SERIALIZER,
            response_deserializer=_RESPONSE_DESERIALIZER,
        )
        self._unary_stream = channel.unary_stream(
            '/'.join((
                '',
                _SERVICE,
                _UNARY_STREAM,
            )), )
        self._stream_unary = channel.stream_unary(
            '/'.join((
                '',
                _SERVICE,
                _STREAM_UNARY,
            )), )
        self._stream_stream = channel.stream_stream(
            '/'.join((
                '',
                _SERVICE,
                _STREAM_STREAM,
            )),
            request_serializer=_REQUEST_SERIALIZER,
            response_deserializer=_RESPONSE_DESERIALIZER,
        )
Example #46
  def testParallelInvocations(self):
    pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    for (group, method), test_messages_sequence in (
        six.iteritems(self._digest.unary_unary_messages_sequences)):
      for test_messages in test_messages_sequence:
        requests = []
        response_futures = []
        for _ in range(test_constants.THREAD_CONCURRENCY):
          request = test_messages.request()
          response_future = pool.submit(
              self._invoker.blocking(group, method), request,
              test_constants.LONG_TIMEOUT)
          requests.append(request)
          response_futures.append(response_future)

        responses = [
            response_future.result() for response_future in response_futures]

        for request, response in zip(requests, responses):
          test_messages.verify(request, response, self)
    pool.shutdown(wait=True)
Example #47
 def _start(self):
     with self._lock:
         if self._pool is None:
             self._pool = logging_pool.pool(_THREAD_POOL_SIZE)
             servicer = _face_implementations.servicer(
                 self._pool, self._breakdown.implementations, None)
             self._back = _base_implementations.back_link(
                 servicer, self._pool, self._pool, self._pool,
                 _ONE_DAY_IN_SECONDS, _ONE_DAY_IN_SECONDS)
             self._fore_link = _fore.ForeLink(
                 self._pool,
                 self._breakdown.request_deserializers,
                 self._breakdown.response_serializers,
                 None,
                 self._key_chain_pairs,
                 port=self._port)
             self._back.join_fore_link(self._fore_link)
             self._fore_link.join_rear_link(self._back)
             self._fore_link.start()
         else:
             raise ValueError('Server currently running!')
Example #48
def test_context():
    port = portpicker.pick_unused_port()
    server_pool = logging_pool.pool(max_workers=1)
    server = grpc.server(server_pool)
    server.add_insecure_port('[::]:{}'.format(port))
    target_factory = executor_test_utils.LocalTestExecutorFactory(
        default_num_clients=3)
    tracers = []

    def _tracer_fn(cardinalities):
        tracer = executor_test_utils.TracingExecutor(
            target_factory.create_executor(cardinalities))
        tracers.append(tracer)
        return tracer

    service = executor_service.ExecutorService(
        executor_test_utils.BasicTestExFactory(_tracer_fn))
    executor_pb2_grpc.add_ExecutorGroupServicer_to_server(service, server)
    server.start()

    channel = grpc.insecure_channel('localhost:{}'.format(port))

    stub = remote_executor_grpc_stub.RemoteExecutorGrpcStub(channel)
    remote_exec = remote_executor.RemoteExecutor(stub)
    remote_exec.set_cardinalities({placements.CLIENTS: 3})
    executor = reference_resolving_executor.ReferenceResolvingExecutor(
        remote_exec)
    try:
        yield collections.namedtuple('_', 'executor tracers')(executor,
                                                              tracers)
    finally:
        executor.close()
        for tracer in tracers:
            tracer.close()
        try:
            channel.close()
        except AttributeError:
            pass  # Public gRPC channel doesn't support close()
        finally:
            server.stop(None)
Example #49
    def setUp(self):
        self._control = test_control.PauseFailControl()
        self._handler = _Handler(self._control)
        self._server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)

        self._record = []
        conditional_interceptor = _filter_server_interceptor(
            lambda x: ('secret', '42') in x.invocation_metadata,
            _LoggingInterceptor('s3', self._record))

        self._server = grpc.server(self._server_pool,
                                   options=(('grpc.so_reuseport', 0),),
                                   interceptors=(
                                       _LoggingInterceptor('s1', self._record),
                                       conditional_interceptor,
                                       _LoggingInterceptor('s2', self._record),
                                   ))
        port = self._server.add_insecure_port('[::]:0')
        self._server.add_generic_rpc_handlers((_GenericHandler(self._handler),))
        self._server.start()

        self._channel = grpc.insecure_channel('localhost:%d' % port)
Example #50
  def testWaitingForSomeButNotAllParallelInvocations(self):
    pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
    for (group, method), test_messages_sequence in (
        six.iteritems(self._digest.unary_unary_messages_sequences)):
      for test_messages in test_messages_sequence:
        requests = []
        response_futures_to_indices = {}
        for index in range(test_constants.THREAD_CONCURRENCY):
          request = test_messages.request()
          response_future = pool.submit(
              self._invoker.blocking(group, method), request,
              test_constants.LONG_TIMEOUT)
          requests.append(request)
          response_futures_to_indices[response_future] = index

        some_completed_response_futures_iterator = itertools.islice(
            futures.as_completed(response_futures_to_indices),
            test_constants.THREAD_CONCURRENCY // 2)
        for response_future in some_completed_response_futures_iterator:
          index = response_futures_to_indices[response_future]
          test_messages.verify(requests[index], response_future.result(), self)
    pool.shutdown(wait=True)
Example #51
  def _start(self):
    with self._lock:
      if self._end_link is not None:
        raise ValueError('Cannot start already-started server!')

      if self._customer_pool is None:
        self._pool = logging_pool.pool(self._pool_size)
        assembly_pool = self._pool
      else:
        assembly_pool = self._customer_pool

      servicer = _GRPCServicer(
          _crust_implementations.servicer(
              self._implementations, self._multi_implementation, assembly_pool))

      self._end_link = _core_implementations.service_end_link(
          servicer, self._default_timeout, self._maximum_timeout)

      self._grpc_link.join_link(self._end_link)
      self._end_link.join_link(self._grpc_link)
      self._grpc_link.start()
      self._end_link.start()
Example #52
    def testWaitingForSomeButNotAllConcurrentFutureInvocations(self):
        pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        request = b'\x67\x68'
        expected_response = self._handler.handle_unary_unary(request, None)
        response_futures = [None] * test_constants.THREAD_CONCURRENCY
        lock = threading.Lock()
        test_is_running_cell = [True]

        def wrap_future(future):

            def wrap():
                try:
                    return future.result()
                except grpc.RpcError:
                    with lock:
                        if test_is_running_cell[0]:
                            raise
                    return None

            return wrap

        multi_callable = _unary_unary_multi_callable(self._channel)
        for index in range(test_constants.THREAD_CONCURRENCY):
            inner_response_future = multi_callable.future(
                request,
                metadata=(
                    ('test',
                     'WaitingForSomeButNotAllConcurrentFutureInvocations'),))
            outer_response_future = pool.submit(
                wrap_future(inner_response_future))
            response_futures[index] = outer_response_future

        some_completed_response_futures_iterator = itertools.islice(
            futures.as_completed(response_futures),
            test_constants.THREAD_CONCURRENCY // 2)
        for response_future in some_completed_response_futures_iterator:
            self.assertEqual(expected_response, response_future.result())
        with lock:
            test_is_running_cell[0] = False
Example #53
    def testSecureNoCert(self):
        server_pool = logging_pool.pool(test_constants.THREAD_CONCURRENCY)
        handler = grpc.method_handlers_generic_handler('test', {
            'UnaryUnary':
            grpc.unary_unary_rpc_method_handler(handle_unary_unary)
        })
        server = grpc.server(server_pool, (handler, ))
        server_cred = grpc.ssl_server_credentials(_SERVER_CERTS)
        port = server.add_secure_port('[::]:0', server_cred)
        server.start()

        channel_creds = grpc.ssl_channel_credentials(
            root_certificates=_TEST_ROOT_CERTIFICATES)
        channel = grpc.secure_channel('localhost:{}'.format(port),
                                      channel_creds,
                                      options=_PROPERTY_OPTIONS)
        response = channel.unary_unary(_UNARY_UNARY)(_REQUEST)
        server.stop(None)

        auth_data = pickle.loads(response)
        self.assertIsNone(auth_data[_ID])
        self.assertIsNone(auth_data[_ID_KEY])
        self.assertDictEqual({'transport_security_type': [b'ssl']},
                             auth_data[_AUTH_CTX])
Example #54
def test_context(rpc_mode='REQUEST_REPLY'):
  port = portpicker.pick_unused_port()
  server_pool = logging_pool.pool(max_workers=1)
  server = grpc.server(server_pool)
  server.add_insecure_port('[::]:{}'.format(port))
  target_executor = executor_stacks.create_local_executor(num_clients=3)(None)
  tracer = executor_test_utils.TracingExecutor(target_executor)
  service = executor_service.ExecutorService(tracer)
  executor_pb2_grpc.add_ExecutorServicer_to_server(service, server)
  server.start()
  channel = grpc.insecure_channel('localhost:{}'.format(port))
  remote_exec = remote_executor.RemoteExecutor(channel, rpc_mode)
  executor = lambda_executor.LambdaExecutor(remote_exec)
  set_default_executor.set_default_executor(executor)
  try:
    yield collections.namedtuple('_', 'executor tracer')(executor, tracer)
  finally:
    set_default_executor.set_default_executor()
    try:
      channel.close()
    except AttributeError:
      pass  # Public gRPC channel doesn't support close()
    finally:
      server.stop(None)
Example #55
  def setUp(self):
    with mock.patch.object(bidaf_server.BidafServer,
                           '_InitializeEnvironment') as mock_method:
      port = portpicker.pick_unused_port()
      server_pool = logging_pool.pool(max_workers=10)
      self._server = grpc.server(server_pool)
      self._server.add_insecure_port('[::]:{}'.format(port))
      servicer = bidaf_server.BidafServer('BidafServer', 'test BiDAF server')
      servicer._environment = MockBidafEnvironment()

      aqa_pb2_grpc.add_EnvironmentServerServicer_to_server(
          servicer, self._server)
      self._server.start()

      channel = grpc.insecure_channel('localhost:%d' % port)
      self._stub = aqa_pb2_grpc.EnvironmentServerStub(channel)

      mock_method.assert_called_once_with(
          model_dir=None,
          data_dir=None,
          debug_mode=False,
          load_test=False,
          load_impossible_questions=False,
          shared_file=None)
Example #56
 def setUp(self):
     self.pool = logging_pool.pool(80)
Example #57
 def _start_server(self):
     self._server_pool = logging_pool.pool(1)
     self._server = grpc.server(self._server_pool)
     self._server.add_insecure_port("[::]:%d" % (_GRPC_PORT))
     add_HelloServicer_to_server(_HelloServicer(), self._server)
     self._server.start()
Example #58
    def testCancelManyCalls(self):
        server_thread_pool = logging_pool.pool(
            test_constants.THREAD_CONCURRENCY)

        server_completion_queue = cygrpc.CompletionQueue()
        server = cygrpc.Server(
            cygrpc.ChannelArgs([cygrpc.ChannelArg(b'grpc.so_reuseport', 0)]))
        server.register_completion_queue(server_completion_queue)
        port = server.add_http2_port(b'[::]:0')
        server.start()
        channel = cygrpc.Channel('localhost:{}'.format(port).encode(),
                                 cygrpc.ChannelArgs([]))

        state = _State()

        server_thread_args = (
            state,
            server,
            server_completion_queue,
            server_thread_pool,
        )
        server_thread = threading.Thread(target=_serve,
                                         args=server_thread_args)
        server_thread.start()

        client_condition = threading.Condition()
        client_due = set()
        client_completion_queue = cygrpc.CompletionQueue()
        client_driver = _QueueDriver(client_condition, client_completion_queue,
                                     client_due)
        client_driver.start()

        with client_condition:
            client_calls = []
            for index in range(test_constants.RPC_CONCURRENCY):
                client_call = channel.create_call(None, _EMPTY_FLAGS,
                                                  client_completion_queue,
                                                  b'/twinkies', None,
                                                  _INFINITE_FUTURE)
                operations = (
                    cygrpc.operation_send_initial_metadata(
                        _EMPTY_METADATA, _EMPTY_FLAGS),
                    cygrpc.operation_send_message(b'\x45\x56', _EMPTY_FLAGS),
                    cygrpc.operation_send_close_from_client(_EMPTY_FLAGS),
                    cygrpc.operation_receive_initial_metadata(_EMPTY_FLAGS),
                    cygrpc.operation_receive_message(_EMPTY_FLAGS),
                    cygrpc.operation_receive_status_on_client(_EMPTY_FLAGS),
                )
                tag = 'client_complete_call_{0:04d}_tag'.format(index)
                client_call.start_client_batch(operations, tag)
                client_due.add(tag)
                client_calls.append(client_call)

        with state.condition:
            while True:
                if state.parked_handlers < test_constants.THREAD_CONCURRENCY:
                    state.condition.wait()
                elif state.handled_rpcs < test_constants.RPC_CONCURRENCY:
                    state.condition.wait()
                else:
                    state.handlers_released = True
                    state.condition.notify_all()
                    break

        client_driver.events(test_constants.RPC_CONCURRENCY *
                             _SUCCESS_CALL_FRACTION)
        with client_condition:
            for client_call in client_calls:
                client_call.cancel()

        with state.condition:
            server.shutdown(server_completion_queue, _SERVER_SHUTDOWN_TAG)
Example #59
def _start_server():
    """Starts an insecure grpc server."""
    return grpc.server(logging_pool.pool(max_workers=1),
                       options=(('grpc.so_reuseport', 0), ))
Example #60
 def _start(self):
     with self._condition:
         self._state = _PoolRelay._State.IDLE
         if self._own_pool:
             self._pool = logging_pool.pool(1)
         return self