def test_proxy_manual_start_stop(container_factory, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    foobar_proxy = ServiceRpcProxy('foobar', rabbit_config)
    foo = foobar_proxy.start()
    assert foo.spam(ham='eggs') == 'eggs'
    assert foo.spam(ham='eggs') == 'eggs'  # test re-use
    foobar_proxy.stop()

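# A minimal sketch of the FooService targeted by several of the 'foobar' proxy
# snippets here. It is an assumption reconstructed from the assertions
# (``spam`` echoes its ``ham`` argument; ``get_context_data`` reads a key from
# the worker context via a dependency provider); the real fixture may differ.
from nameko.extensions import DependencyProvider
from nameko.rpc import rpc


class ContextReader(DependencyProvider):
    """ Exposes a callable that reads values from the worker context data. """
    def get_dependency(self, worker_ctx):
        def get_context_value(key):
            return worker_ctx.data.get(key)
        return get_context_value


class FooService(object):
    name = 'foobar'

    get_context_value = ContextReader()

    @rpc
    def spam(self, ham):
        return ham

    @rpc
    def get_context_data(self, name):
        return self.get_context_value(name)
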
def test_proxy_context_data(container_factory, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    context_data = {'language': 'en'}
    with ServiceRpcProxy('foobar', rabbit_config, context_data) as foo:
        assert foo.get_context_data('language') == 'en'

    context_data = {'language': 'fr'}
    with ServiceRpcProxy('foobar', rabbit_config, context_data) as foo:
        assert foo.get_context_data('language') == 'fr'

def test_multiple_proxies(container_factory, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    with ServiceRpcProxy('foobar', rabbit_config) as proxy1:
        res1 = proxy1.spam.call_async(ham=1)

        with ServiceRpcProxy('foobar', rabbit_config) as proxy2:
            res2 = proxy2.spam.call_async(ham=2)

            assert res1.result() == 1
            assert res2.result() == 2

def test_replace_dependencies_args(container_factory, rabbit_config):

    class Service(object):
        name = "service"

        foo_proxy = RpcProxy("foo_service")
        bar_proxy = RpcProxy("bar_service")
        baz_proxy = RpcProxy("baz_service")

        @rpc
        def method(self, arg):
            self.foo_proxy.remote_method(arg)

    container = container_factory(Service, rabbit_config)

    # replace a single dependency
    foo_proxy = replace_dependencies(container, "foo_proxy")

    # replace multiple dependencies
    replacements = replace_dependencies(container, "bar_proxy", "baz_proxy")
    assert len([x for x in replacements]) == 2

    # verify that container.extensions doesn't include an RpcProxy anymore
    assert all([
        not isinstance(dependency, RpcProxy)
        for dependency in container.extensions
    ])

    container.start()

    # verify that the mock dependency collects calls
    msg = "msg"
    with ServiceRpcProxy("service", rabbit_config) as service_proxy:
        service_proxy.method(msg)

    foo_proxy.remote_method.assert_called_once_with(msg)

def send(self, method, service_name, msg):
    # logger.info("[SEND] Service Name : {} AMQP_URI : {}".format(service_name, self.AMQP_URI))
    with ServiceRpcProxy(service_name, self.AMQP_URI) as rpc:
        call_method = getattr(rpc, method)
        # call_async() followed by result() blocks until the reply arrives,
        # so this is effectively a synchronous round trip
        response = call_method.call_async(msg)
        return response.result()

def test_rpc_unknown_service_standalone(rabbit_config):
    with ServiceRpcProxy("unknown_service", rabbit_config) as proxy:
        with pytest.raises(UnknownService) as exc_info:
            proxy.anything()

    assert exc_info.value._service_name == 'unknown_service'

def test_user_custom(self, container_factory, service_cls, config):
    config['SENTRY']['USER_TYPE_CONTEXT_KEYS'] = (
        'user|email',  # excludes session
        'other_pattern'
    )

    container = container_factory(service_cls, config)
    container.start()

    user_data = {
        'user': '******',
        'username': '******',
        'user_id': 1,
        'email': '*****@*****.**',
        'email_address': '*****@*****.**',
    }
    context_data = {
        'session_id': 1,  # excluded from user data
        'language': 'en-gb'
    }
    context_data.update(user_data)

    with ServiceRpcProxy('service', config, context_data=context_data) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.broken()

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    _, kwargs = sentry.client.send.call_args
    assert kwargs['user'] == user_data
    assert "session_id" not in kwargs['user']

def test_runner_with_duplicate_services(runner_factory, rabbit_config):

    # host Service multiple times
    runner = runner_factory(rabbit_config)
    runner.add_service(Service)
    runner.add_service(Service)  # no-op
    runner.start()

    # it should only be hosted once
    assert len(runner.containers) == 1

    # test events (only one service is hosted)
    event_data = "msg"
    dispatch = event_dispatcher(rabbit_config)
    dispatch('srcservice', 'testevent', event_data)

    with eventlet.Timeout(1):
        while len(received) == 0:
            eventlet.sleep()

        assert received == [event_data]

    # test rpc
    arg = "msg"
    del received[:]

    with ServiceRpcProxy("service", rabbit_config) as proxy:
        proxy.handle(arg)
        assert received == [arg]

def test_graceful_stop_on_one_container_error(runner_factory, rabbit_config):

    runner = runner_factory(rabbit_config, ExampleService, SecondService)
    runner.start()

    container = get_container(runner, ExampleService)
    second_container = get_container(runner, SecondService)

    original_stop = second_container.stop
    with patch.object(
        second_container, 'stop', autospec=True, wraps=original_stop
    ) as stop:
        rpc_consumer = get_extension(container, RpcConsumer)
        with patch.object(
            rpc_consumer, 'handle_result', autospec=True
        ) as handle_result:
            exception = Exception("error")
            handle_result.side_effect = exception

            # use a standalone rpc proxy to call exampleservice.task()
            with ServiceRpcProxy("exampleservice", rabbit_config) as proxy:
                # proxy.task() will hang forever because it generates an error
                # in the remote container (so never receives a response).
                proxy.task.call_async()

            # verify that the error bubbles up to runner.wait()
            with pytest.raises(Exception) as exc_info:
                runner.wait()
            assert exc_info.value == exception

            # Check that the second service was stopped due to the first
            # service being killed
            stop.assert_called_once_with()

def test_expected_exception_not_reported(
    exception_cls, expected_count, container_factory, config
):
    class Service(object):
        name = "service"

        sentry = SentryReporter()

        @rpc(expected_exceptions=CustomException)
        def broken(self):
            raise exception_cls("Error!")

    config['SENTRY']['REPORT_EXPECTED_EXCEPTIONS'] = False

    container = container_factory(Service, config)
    container.start()

    with entrypoint_waiter(container, 'broken') as result:
        with ServiceRpcProxy('service', config) as rpc_proxy:
            with pytest.raises(RemoteError):
                rpc_proxy.broken()

    with pytest.raises(exception_cls):
        result.get()

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == expected_count

def test_user_defaults(self, container_factory, service_cls, config):

    user_data = {
        'user': '******',
        'username': '******',
        'user_id': 1,
        'email': '*****@*****.**',
        'email_address': '*****@*****.**',
        'session_id': 1
    }

    container = container_factory(service_cls, config)
    container.start()

    context_data = {
        'language': 'en-gb'
    }
    context_data.update(user_data)

    with ServiceRpcProxy(
        'service', config, context_data=context_data
    ) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.broken()

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    _, kwargs = sentry.client.send.call_args
    assert kwargs['user'] == user_data

def test_proxy_queue_expired_even_if_unused(rabbit_manager, rabbit_config):
    vhost = rabbit_config['vhost']
    with ServiceRpcProxy('exampleservice', rabbit_config):
        assert len(rabbit_manager.get_queues(vhost)) == 1

    eventlet.sleep(.15)  # sleep for >TTL
    assert len(rabbit_manager.get_queues(vhost)) == 0

def test_custom_serializer(container_factory, rabbit_config,
                           sniffer_queue_factory):

    def encode(value):
        value = json.dumps(value)
        return value.upper()

    def decode(value):
        value = value.lower()
        return json.loads(value)

    register("upperjson", encode, decode, "application/x-upper-json", "utf-8")

    class Service(object):
        name = "service"

        @rpc
        def echo(self, arg):
            return arg

    rabbit_config[SERIALIZER_CONFIG_KEY] = "upperjson"

    container = container_factory(Service, rabbit_config)
    container.start()

    get_messages = sniffer_queue_factory('nameko-rpc')

    # verify RPC works end-to-end
    with ServiceRpcProxy('service', rabbit_config) as proxy:
        assert proxy.echo("hello") == "hello"

    # verify sniffed messages serialized as expected
    msg = get_messages()[0]
    assert '"RESULT": "HELLO"' in msg['payload']
    assert msg['properties']['content_type'] == "application/x-upper-json"

def test_custom_serializer(container_factory, rabbit_config,
                           sniffer_queue_factory):

    class Service(object):
        name = "service"

        @rpc
        def echo(self, arg):
            return arg

    rabbit_config[SERIALIZER_CONFIG_KEY] = "upperjson"
    rabbit_config[SERIALIZERS_CONFIG_KEY] = {
        'upperjson': {
            'encoder': 'test.test_serialization.upperjson_encode',
            'decoder': 'test.test_serialization.upperjson_decode',
            'content_type': 'application/x-upper-json'
        }
    }

    container = container_factory(Service, rabbit_config)
    container.start()

    get_messages = sniffer_queue_factory('nameko-rpc')

    # verify RPC works end-to-end
    with ServiceRpcProxy('service', rabbit_config) as proxy:
        assert proxy.echo("hello") == "hello"

    # verify sniffed messages serialized as expected
    msg = get_messages()[0]
    assert '"RESULT": "HELLO"' in msg['payload']
    assert msg['properties']['content_type'] == "application/x-upper-json"

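# Hedged sketch of the module-level serializer functions referenced by the
# SERIALIZERS_CONFIG_KEY paths above ('test.test_serialization.upperjson_*').
# The bodies are reconstructed from the inline encode/decode in the earlier
# variant of this test, so treat the exact implementation as an assumption.
import json


def upperjson_encode(value):
    # serialize to JSON, then upper-case the whole payload
    value = json.dumps(value)
    return value.upper()


def upperjson_decode(value):
    # lower-case back to valid JSON before parsing
    value = value.lower()
    return json.loads(value)
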
def test_runner_with_duplicate_services(
    runner_factory, rabbit_config, service_cls, tracker
):
    # host Service multiple times
    runner = runner_factory(rabbit_config)
    runner.add_service(service_cls)
    runner.add_service(service_cls)  # no-op
    runner.start()

    # it should only be hosted once
    assert len(runner.containers) == 1
    container = list(runner.containers)[0]

    # test events (only one service is hosted)
    event_data = "event"
    dispatch = event_dispatcher(rabbit_config)

    with entrypoint_waiter(container, "handle"):
        dispatch('srcservice', "testevent", event_data)
    assert tracker.call_args_list == [call(event_data)]

    # test rpc
    arg = "arg"
    with ServiceRpcProxy("service", rabbit_config) as proxy:
        proxy.handle(arg)
    assert tracker.call_args_list == [call(event_data), call(arg)]

def test_tags_custom(self, container_factory, service_cls, config):
    config['SENTRY']['TAG_TYPE_CONTEXT_KEYS'] = ('session', 'other_pattern')

    container = container_factory(service_cls, config)
    container.start()

    context_data = {
        'call_id_stack': ["standalone_rpc_proxy.call.0"],
        'session_id': 1,
        'email_address': '*****@*****.**',
    }

    with ServiceRpcProxy('service', config, context_data=context_data) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.broken()

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    expected_tags = {
        'site': config['SENTRY']['CLIENT_CONFIG']['site'],
        'call_id': 'service.broken.1',
        'parent_call_id': 'standalone_rpc_proxy.call.0',
        'service_name': 'service',
        'method_name': 'broken',
        'session_id': '1',  # extra
    }

    _, kwargs = sentry.client.send.call_args
    assert expected_tags == kwargs['tags']

def service_rpc(self, toxiproxy, rabbit_config):
    config = rabbit_config.copy()
    config['AMQP_URI'] = toxiproxy.uri
    with ServiceRpcProxy("service", config) as proxy:
        yield proxy

def test_extra(self, container_factory, service_cls, config):
    container = container_factory(service_cls, config)
    container.start()

    context_data = {
        'language': 'en-gb'
    }

    with ServiceRpcProxy(
        'service', config, context_data=context_data
    ) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.broken()

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    expected_extra = {
        'call_id_stack': (
            repr(u"standalone_rpc_proxy.call.0"),
            repr(u"service.broken.1")
        ),
        'language': repr(u"en-gb"),
        'sys.argv': ANY
    }

    _, kwargs = sentry.client.send.call_args
    assert kwargs['extra'] == expected_extra

def test_dependency_provider_includes_context_data_in_grpc_request_metadata(
    self, amqprpc_service, rabbit_config
):
    with ServiceRpcProxy("amqp", context_data={"a": "A"}) as proxy_rpc:
        context_data = json.loads(proxy_rpc.proxy())

    # added by RPC client, propagated by GRPC
    assert context_data["a"] == "A"

    # added by GRPC client as metadata
    assert context_data["x"] == "X"
    assert context_data["x-bin"] == "CgsKCwoL"  # base64-encoded for response
    # NOTE adding binary objects to context_data will cause problems for
    # extensions that can't serialize them (e.g. AMQP entrypoints in their
    # default configuration); this is a general problem when mixing extensions.
    # opportunity to improve this when nameko grows its own serialization layer

    # call id stack is propagated
    assert context_data["call_id_stack"] == [
        "standalone_rpc_client.0.0",
        "amqp.proxy.1",
        "advanced.unary_unary.2",
    ]

def test_context_merge(self, container_factory, service_cls, config):
    container = container_factory(service_cls, config)
    container.start()

    user_data = {
        'user': '******'
    }
    context_data = {
        'language': 'en-gb'
    }
    context_data.update(user_data)

    data = {'foo': 'bar'}

    with ServiceRpcProxy(
        'service', config, context_data=context_data
    ) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.broken(data)

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    _, kwargs = sentry.client.send.call_args
    assert kwargs['user'] == user_data
    assert kwargs['arbitrary'] == data

def test_greenthread_raise_in_kill(container_factory, rabbit_config, logger):

    class Service(object):
        name = "service"

        @rpc
        def echo(self, arg):
            return arg  # pragma: no cover

    container = container_factory(Service, rabbit_config)
    queue_consumer = get_extension(container, QueueConsumer)
    rpc_consumer = get_extension(container, RpcConsumer)

    # an error in rpc_consumer.handle_message will kill the queue_consumer's
    # greenthread. when the container suicides and kills the queue_consumer,
    # it should warn instead of re-raising the original exception
    exc = Exception('error handling message')
    with patch.object(rpc_consumer, 'handle_message') as handle_message:
        handle_message.side_effect = exc

        container.start()

        with ServiceRpcProxy('service', rabbit_config) as service_rpc:
            # spawn because `echo` will never respond
            eventlet.spawn(service_rpc.echo, "foo")

    # container will have died with the messaging handling error
    with pytest.raises(Exception) as exc_info:
        container.wait()
    assert str(exc_info.value) == "error handling message"

    # queueconsumer will have warned about the exc raised by its greenthread
    assert logger.warn.call_args_list == [
        call("QueueConsumer %s raised `%s` during kill", queue_consumer, exc)
    ]

def test_consumer_replacing(container_factory, rabbit_manager, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    class FakeRepliesDict(dict):
        # act like the internal replies dict, but keep a list of messages
        # passing through for later inspection
        def __init__(self):
            self.messages = []

        def __setitem__(self, key, value):
            self.messages.append(value)
            super(FakeRepliesDict, self).__setitem__(key, value)

    fake_replies = FakeRepliesDict()

    with ServiceRpcProxy('foobar', rabbit_config) as proxy:
        # extra setup, as after e.g. connection error
        proxy.reply_listener.queue_consumer._setup_consumer()

        with patch.object(
            proxy.reply_listener.queue_consumer, 'replies', fake_replies
        ):
            count = 10
            replies = [proxy.spam.call_async('hello') for _ in range(count)]
            assert [reply.result() for reply in replies] == ['hello'] * count

    consumer_tags = set()
    # there should only be a single consumer. we check by looking at the
    # consumer tag on the received messages
    for _, message in fake_replies.messages:
        consumer_tags.add(message.delivery_info['consumer_tag'])
    assert len(consumer_tags) == 1

def test_rpc_headers(container_factory, rabbit_config):
    container = container_factory(ExampleService, rabbit_config)

    context_data = {'language': 'en', 'otherheader': 'othervalue'}

    headers = {}
    rpc_consumer = get_extension(container, RpcConsumer)
    handle_message = rpc_consumer.handle_message

    with patch.object(
        rpc_consumer, 'handle_message', autospec=True
    ) as patched_handler:

        def side_effect(body, message):
            headers.update(message.headers)  # extract message headers
            return handle_message(body, message)

        patched_handler.side_effect = side_effect
        container.start()

        # use a standalone rpc proxy to call exampleservice.say_hello()
        with ServiceRpcProxy(
            "exampleservice", rabbit_config, context_data
        ) as proxy:
            proxy.say_hello()

    # headers as per context data, plus call stack
    assert headers == {
        'nameko.language': 'en',
        'nameko.otherheader': 'othervalue',
        'nameko.call_id_stack': ['standalone_rpc_proxy.call.0'],
    }

def test_activate_deactivate(self, container_factory, service_cls, config):
    container = container_factory(service_cls, config)
    container.start()

    with ServiceRpcProxy('service', config) as rpc_proxy:
        with pytest.raises(RemoteError):
            rpc_proxy.activate_deactivate("a", "b", "c")

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    _, kwargs = sentry.client.send.call_args

    breadcrumbs = [
        crumb for crumb in kwargs['breadcrumbs']['values']
        if crumb['category'] == "worker"
    ]

    assert breadcrumbs == [{
        'category': "worker",
        'data': None,
        'level': ANY,
        'message': 'a',
        'timestamp': ANY,
        'type': 'default'
    }, {
        'category': "worker",
        'data': None,
        'level': ANY,
        'message': 'c',
        'timestamp': ANY,
        'type': 'default'
    }]

def test_breadcrumbs(self, method, container_factory, service_cls, config):
    container = container_factory(service_cls, config)
    container.start()

    data = {'foo': 'bar'}

    with ServiceRpcProxy('service', config) as rpc_proxy:
        with pytest.raises(RemoteError):
            getattr(rpc_proxy, method)(data)

    sentry = get_extension(container, SentryReporter)
    assert sentry.client.send.call_count == 1

    _, kwargs = sentry.client.send.call_args

    breadcrumbs = [
        crumb for crumb in kwargs['breadcrumbs']['values']
        if crumb['category'] == "worker"
    ]

    assert breadcrumbs == [{
        'category': 'worker',
        'data': data,
        'level': 'warning',
        'message': 'breadcrumb message',
        'timestamp': ANY,
        'type': 'default'
    }]

def test_consumer_replacing(container_factory, rabbit_manager, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    class FakeRepliesDict(dict):
        # act like the internal replies dict, but keep a list of messages
        # passing through for later inspection
        def __init__(self):
            self.messages = []

        def __setitem__(self, key, value):
            self.messages.append(value)
            super(FakeRepliesDict, self).__setitem__(key, value)

    fake_replies = FakeRepliesDict()

    with ServiceRpcProxy('foobar', rabbit_config) as proxy:
        # extra setup, as after e.g. connection error
        proxy.reply_listener.queue_consumer._setup_consumer()

        with patch.object(
            proxy.reply_listener.queue_consumer, 'replies', fake_replies
        ):
            count = 10
            replies = [proxy.spam.call_async('hello') for _ in range(count)]
            assert [reply.result() for reply in replies] == ['hello'] * count

def test_predictable_call_ids(runner_factory, rabbit_config):
    worker_contexts = []

    class CaptureWorkerContext(DependencyProvider):
        def worker_setup(self, worker_ctx):
            worker_contexts.append(worker_ctx)

    class ServiceX(object):
        name = "x"

        capture = CaptureWorkerContext()
        service_y = RpcProxy("y")

        @rpc
        def method(self):
            self.service_y.method()

    class ServiceY(object):
        name = "y"

        capture = CaptureWorkerContext()

        @rpc
        def method(self):
            pass

    runner = runner_factory(rabbit_config, ServiceX, ServiceY)
    runner.start()

    with ServiceRpcProxy("x", rabbit_config) as service_x:
        service_x.method()

    call_ids = [worker_ctx.call_id for worker_ctx in worker_contexts]
    assert call_ids == ["x.method.1", "y.method.2"]

def test_rpc_over_ssl(container_factory, rabbit_ssl_config):
    container = container_factory(ExampleService, rabbit_ssl_config)
    container.start()

    with ServiceRpcProxy("exampleservice", rabbit_ssl_config) as proxy:
        assert proxy.echo("a", "b", foo="bar") == [['a', 'b'], {'foo': 'bar'}]

def test_proxy_worker_context(container_factory, rabbit_config):
    container = container_factory(
        FooService, rabbit_config, CustomWorkerContext
    )
    container.start()

    context_data = {'custom_header': 'custom_value'}

    with ServiceRpcProxy(
        'foobar', rabbit_config, context_data,
        worker_ctx_cls=CustomWorkerContext
    ) as foo:
        assert foo.get_context_data('custom_header') == "custom_value"

    with ServiceRpcProxy('foobar', rabbit_config, context_data) as foo:
        assert foo.get_context_data('custom_header') is None

def test_multiple_calls_to_result(container_factory, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    with ServiceRpcProxy('foobar', rabbit_config) as proxy:
        res = proxy.spam.call_async(ham=1)
        res.result()
        res.result()

def test_proxy(container_factory, rabbit_config):
    container = container_factory(FooService, rabbit_config)
    container.start()

    with ServiceRpcProxy('foobar', rabbit_config) as foo:
        assert foo.spam(ham='eggs') == 'eggs'
        assert foo.spam(ham='eggs') == 'eggs'  # test re-use

def test_service_disconnect_with_active_rpc_worker(
    container_factory, rabbit_manager, rabbit_config
):
    """ Break the connection between a service's queue consumer and rabbit
    while the service has an active rpc worker (i.e. response required).
    """
    container = container_factory(ExampleService, rabbit_config)
    container.start()

    # get the service's queue consumer connection while we know it's the
    # only active connection
    vhost = rabbit_config['vhost']
    connections = get_rabbit_connections(vhost, rabbit_manager)
    assert len(connections) == 1
    queue_consumer_conn = connections[0]['name']

    # create a standalone RPC proxy towards the target service
    rpc_proxy = ServiceRpcProxy('exampleservice', rabbit_config)
    proxy = rpc_proxy.start()

    # there should now be two connections:
    # 1. the queue consumer from the target service
    # 2. the queue consumer in the standalone rpc proxy
    connections = get_rabbit_connections(vhost, rabbit_manager)
    assert len(connections) == 2

    # disconnect the service's queue consumer while it's running a worker
    eventlet.spawn(disconnect_on_event, rabbit_manager, queue_consumer_conn)

    # we should receive the response from the first call
    # the standalone RPC proxy will stop listening as soon as it receives
    # a reply, so the duplicate response is discarded
    arg = uuid.uuid4().hex
    assert proxy.method(arg) == arg

    # `method` will have been called twice with the same `arg`, because
    # rabbit will have redelivered the un-ack'd message from the first call
    def method_called_twice():
        assert method_called.call_args_list == [call(arg), call(arg)]

    assert_stops_raising(method_called_twice)

    connections = get_rabbit_connections(vhost, rabbit_manager)
    assert queue_consumer_conn not in [conn['name'] for conn in connections]

    rpc_proxy.stop()

def make_proxy(service_name, **kwargs):
    proxy = ServiceRpcProxy(service_name, container_config, **kwargs)
    all_proxies.append(proxy)
    return proxy.start()
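
# Hedged usage sketch for the ``make_proxy`` helper above, which is assumed to
# be a closure inside a pytest fixture where ``container_config`` and
# ``all_proxies`` are defined, with the fixture stopping every collected proxy
# on teardown. The service and method names below are illustrative only.
def test_some_remote_call(make_proxy):
    proxy = make_proxy('some_service')   # already-started ServiceRpcProxy
    proxy.some_method()                  # hypothetical remote method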