async def test_multiple_rpc_transports(loop, redis_server_url, redis_server_b_url, consume_rpcs):
    """Configure a bus with two redis transports and ensure they write to the correct redis servers"""
    url_a = redis_server_url
    url_b = redis_server_b_url

    logging.warning(f"Server A url: {url_a}")
    logging.warning(f"Server B url: {url_b}")

    def redis(url):
        # Build a fresh transport-config dict per use so nothing is shared/mutated
        return {"redis": {"url": url}}

    config = Config.load_dict({
        "bus": {"schema": {"transport": redis(url_a)}},
        "apis": {
            # The default api talks to server A; api_b talks to server B
            "default": {"rpc_transport": redis(url_a), "result_transport": redis(url_a)},
            "api_b": {"rpc_transport": redis(url_b), "result_transport": redis(url_b)},
        },
    })

    bus = lightbus.create(config=config)
    bus.client.register_api(ApiA())
    bus.client.register_api(ApiB())

    consumer_task = asyncio.ensure_future(consume_rpcs(bus))
    await asyncio.sleep(0.1)

    await bus.api_a.rpc_a.call_async()
    await bus.api_b.rpc_b.call_async()
    await asyncio.sleep(0.1)

    await cancel(consumer_task)
    await bus.client.close_async()
def test_load_bus_config_file_yaml(tmp_directory: Path):
    """A YAML config file on disk should load into a Config with the expected values."""
    config_path = tmp_directory / "config.yaml"
    config_path.write_text(EXAMPLE_VALID_YAML)

    loaded = Config.load_file(str(config_path))
    assert loaded.bus().log_level == LogLevelEnum.WARNING
async def test_validation_event(loop, bus: lightbus.path.BusPath, dummy_api, mocker):
    """Check validation happens when firing an event"""
    bus.client.config = Config.load_dict(
        {"apis": {"default": {"validate": True, "strict_validation": True}}}
    )
    mocker.patch("jsonschema.validate", autospec=True)

    async def noop_listener(*args, **kwargs):
        pass

    # Publish the dummy api's schema to the bus, then pull it back down again
    await bus.client.schema.add_api(dummy_api)
    await bus.client.schema.save_to_bus()
    await bus.client.schema.load_from_bus()

    listener_task = await bus.client.listen_for_event("my.dummy", "my_event", noop_listener)

    await asyncio.sleep(0.1)
    await bus.my.dummy.my_event.fire_async(field="Hello")
    await cancel(listener_task)

    # Validate gets called with the fired parameters and the event's json schema
    jsonschema.validate.assert_called_with(
        {"field": "Hello"},
        {
            "$schema": "http://json-schema.org/draft-04/schema#",
            "type": "object",
            "additionalProperties": False,
            "properties": {"field": {"type": "string"}},
            "required": ["field"],
            "title": "Event my.dummy.my_event parameters",
        },
    )
def redis_no_default_config():
    """Config with redis transports only on the 'other' api, defaults disabled."""
    transport_keys = ("rpc_transport", "result_transport", "event_transport")
    return Config.load_dict(
        {
            "bus": {"schema": {"transport": {"redis": {}}}},
            "apis": {
                # Each transport gets its own fresh empty redis config dict
                "other": {key: {"redis": {}} for key in transport_keys}
            },
        },
        set_defaults=False,
    )
def create_bus_client_with_unhappy_schema(validate=True, strict_validation=True):
    """Create a bus client (from the enclosing scope's ``dummy_bus``) whose schema
    validation always fails with a ``ValidationError``.

    NOTE(review): relies on ``mocker`` and ``dummy_bus`` being available from the
    enclosing scope (fixture closure) — confirm against the surrounding fixture.
    """
    # Use the base transport as a dummy, it only needs to have a
    # close() method on it in order to keep the client.close() method happy
    schema = Schema(
        schema_transport=TransportPool(
            transport_class=lightbus.Transport,
            config=None,
            transport_config=NamedTuple("DummyTransportConfig")(),
        )
    )
    # Fake loading of remote schemas from schema transport
    schema._remote_schemas = {}
    config = Config.load_dict(
        {"apis": {"default": {"validate": validate, "strict_validation": strict_validation}}}
    )
    fake_schema = {"parameters": {"p": {}}, "response": {}}
    # Fix: the original statements below ended with stray trailing commas, turning
    # each into a discarded single-element tuple. Behavior was unaffected, but the
    # commas were misleading; they have been removed.
    mocker.patch.object(schema, "get_rpc_schema", autospec=True, return_value=fake_schema)
    mocker.patch.object(schema, "get_event_schema", autospec=True, return_value=fake_schema)
    # Make sure the test api named "api" has a schema, otherwise strict_validation
    # will fail it
    schema.local_schemas["api"] = fake_schema
    mocker.patch("jsonschema.validate", autospec=True, side_effect=ValidationError("test error"))
    dummy_bus.client.schema = schema
    dummy_bus.client.config = config
    return dummy_bus.client
async def test_validation_rpc(loop, bus: lightbus.path.BusPath, dummy_api, mocker):
    """Check validation happens when performing an RPC"""
    bus.client.config = Config.load_dict(
        {"apis": {"default": {"validate": True, "strict_validation": True}}}
    )
    mocker.patch("jsonschema.validate", autospec=True)

    async def consume():
        return await bus.client.consume_rpcs(apis=[dummy_api])

    # Publish the dummy api's schema to the bus, then pull it back down again
    await bus.client.schema.add_api(dummy_api)
    await bus.client.schema.save_to_bus()
    await bus.client.schema.load_from_bus()

    consumer_task = asyncio.ensure_future(consume(), loop=loop)

    await asyncio.sleep(0.1)
    result = await bus.my.dummy.my_proc.call_async(field="Hello")
    await cancel(consumer_task)

    assert result == "value: Hello"

    # Validate gets called against the RPC's response schema
    jsonschema.validate.assert_called_with(
        "value: Hello",
        {
            "$schema": "http://json-schema.org/draft-04/schema#",
            "title": "RPC my.dummy.my_proc() response",
            "type": "string",
        },
    )
def parse_args(args=None):
    """Build the lightbus argument parser and parse the given (or CLI) arguments."""
    parser = argparse.ArgumentParser(description="Lightbus management command.")
    subparsers = parser.add_subparsers(help="Commands", dest="subcommand")
    subparsers.required = True

    # Allow each command to set up its own arguments
    command_modules = (
        lightbus.commands.run,
        lightbus.commands.shell,
        lightbus.commands.dump_schema,
        lightbus.commands.dump_config_schema,
        lightbus.commands.inspect,
        lightbus.commands.version,
    )
    for command_module in command_modules:
        command_module.Command().setup(parser, subparsers)

    # Create a temporary plugin registry in order to run the before_parse_args hook
    plugin_registry = PluginRegistry()
    plugin_registry.autoload_plugins(config=Config.load_dict({}))
    block(
        plugin_registry.execute_hook("before_parse_args", parser=parser, subparsers=subparsers),
        timeout=5,
    )

    parsed = parser.parse_args(sys.argv[1:] if args is None else args)
    # Note that we don't have an after_parse_args plugin hook. Instead we use the receive_args
    # hook which is called once we have instantiated our plugins
    return parsed
def test_autoload_plugins(plugin_registry: PluginRegistry):
    """Autoloading should register the state and metrics plugins, in that order."""
    config = Config.load_dict(
        {"plugins": {"internal_state": {"enabled": True}, "internal_metrics": {"enabled": True}}}
    )
    assert not plugin_registry._plugins
    assert plugin_registry.autoload_plugins(config)

    loaded_types = [type(plugin) for plugin in plugin_registry._plugins]
    assert loaded_types == [StatePlugin, MetricsPlugin]
async def test_listen_to_multiple_events_across_multiple_transports(
    loop, redis_server_url, redis_server_b_url, worker: Worker
):
    """A single listener registration should receive events from both redis transports."""
    url_a = redis_server_url
    url_b = redis_server_b_url

    logging.warning(f"Server A URL: {url_a}")
    logging.warning(f"Server B URL: {url_b}")

    config = Config.load_dict({
        "bus": {"schema": {"transport": {"redis": {"url": url_a}}}},
        "apis": {
            "default": {"event_transport": {"redis": {"url": url_a}}},
            "api_b": {"event_transport": {"redis": {"url": url_b}}},
        },
    })

    bus = lightbus.create(config=config)
    bus.client.disable_proxy()
    bus.client.register_api(ApiA())
    bus.client.register_api(ApiB())
    await asyncio.sleep(0.1)

    total_calls = 0

    def listener(*args, **kwargs):
        nonlocal total_calls
        total_calls += 1

    bus.client.listen_for_events(
        events=[("api_a", "event_a"), ("api_b", "event_b")],
        listener=listener,
        listener_name="test",
    )

    async with worker(bus):
        await asyncio.sleep(0.1)
        await bus.api_a.event_a.fire_async()
        await bus.api_b.event_b.fire_async()
        await asyncio.sleep(0.1)

    # One event from each server
    assert total_calls == 2
def test_plugin_selector_config():
    """Default config should expose plugin sections with sensible defaults."""
    config = Config.load_dict({})

    # Both built-in plugin config sections exist
    assert hasattr(config._config.plugins, "internal_state")
    assert hasattr(config._config.plugins, "internal_metrics")

    state_config = config.plugin("internal_state")
    assert state_config.ping_enabled is True
    assert state_config.ping_interval > 0
    assert state_config.enabled is True
async def test_multiple_event_transports(loop, server, redis_server_b):
    """Configure a bus with two redis transports and ensure they write to the correct redis servers"""
    registry.add(ApiA())
    registry.add(ApiB())
    manually_set_plugins(plugins={})

    port_a = server.tcp_address.port
    port_b = redis_server_b.tcp_address.port

    logging.warning(f"Server A port: {port_a}")
    logging.warning(f"Server B port: {port_b}")

    def redis_transport(port):
        # Fresh per-use config dict pointing at the given local redis port
        return {
            "redis": {
                "url": f"redis://localhost:{port}",
                "stream_use": StreamUse.PER_EVENT.value,
            }
        }

    config = Config.load_dict(
        {
            "bus": {"schema": {"transport": {"redis": {"url": f"redis://localhost:{port_a}"}}}},
            "apis": {
                "default": {"event_transport": redis_transport(port_a)},
                "api_b": {"event_transport": redis_transport(port_b)},
            },
        }
    )

    bus = BusPath(name="", parent=None, client=lightbus.BusClient(config=config, loop=loop))
    await asyncio.sleep(0.1)
    await bus.api_a.event_a.fire_async()
    await bus.api_b.event_b.fire_async()

    transport_registry = bus.client.transport_registry
    connection_manager_a = transport_registry.get_event_transport("api_a").connection_manager
    connection_manager_b = transport_registry.get_event_transport("api_b").connection_manager

    # Server A holds only api_a's stream; server B holds only api_b's
    with await connection_manager_a() as redis:
        assert await redis.xrange("api_a.event_a:stream")
        assert await redis.xrange("api_b.event_b:stream") == []

    with await connection_manager_b() as redis:
        assert await redis.xrange("api_a.event_a:stream") == []
        assert await redis.xrange("api_b.event_b:stream")
async def test_multiple_event_transports(
    loop, redis_server_url, redis_server_b_url, create_redis_client
):
    """Configure a bus with two redis transports and ensure they write to the correct redis servers"""
    url_a = redis_server_url
    url_b = redis_server_b_url

    logging.warning(f"Server A URL: {url_a}")
    logging.warning(f"Server B URL: {url_b}")

    def event_transport(url):
        # Fresh per-use config dict for a per-event-stream redis transport
        return {"redis": {"url": url, "stream_use": StreamUse.PER_EVENT.value}}

    config = Config.load_dict({
        "bus": {"schema": {"transport": {"redis": {"url": url_a}}}},
        "apis": {
            "default": {"event_transport": event_transport(url_a)},
            "api_b": {"event_transport": event_transport(url_b)},
        },
    })

    bus = lightbus.create(config=config)
    bus.client.disable_proxy()
    bus.client.register_api(ApiA())
    bus.client.register_api(ApiB())
    await asyncio.sleep(0.1)

    await bus.api_a.event_a.fire_async()
    await bus.api_b.event_b.fire_async()

    redis_a = await create_redis_client(address=redis_server_url)
    redis_b = await create_redis_client(address=redis_server_b_url)

    # Each event must land only on its own server's stream
    assert await redis_a.xrange("api_a.event_a:stream")
    assert await redis_a.xrange("api_b.event_b:stream") == []

    assert await redis_b.xrange("api_a.event_a:stream") == []
    assert await redis_b.xrange("api_b.event_b:stream")

    await bus.client.close_async()
def test_default_config():
    """An empty dict should yield a fully-populated default config (redis everywhere)."""
    config = Config.load_dict({})

    assert config.bus()
    assert config.api()

    default_api = config.api()
    assert default_api.rpc_transport.redis
    assert default_api.result_transport.redis
    assert default_api.event_transport.redis

    assert config.bus().schema.transport.redis
def test_commands_run_env(mocker, server, redis_config_file, set_env, test_bus_module):
    """The run command should honour LIGHTBUS_CONFIG / LIGHTBUS_MODULE env variables."""
    run_forever_mock = mocker.patch.object(BusClient, "_actually_run_forever")
    args = commands.parse_args(args=["run"])

    with set_env(LIGHTBUS_CONFIG=redis_config_file, LIGHTBUS_MODULE=test_bus_module):
        lightbus.commands.run.Command().handle(args, config=Config(RootConfig()))

    assert run_forever_mock.called
def test_autoload_plugins():
    """Autoloading should populate the global plugin registry in a fixed order."""
    config = Config.load_dict({})
    assert get_plugins() is None
    assert autoload_plugins(config)

    loaded = [(name, plugin.__class__) for name, plugin in get_plugins().items()]
    assert loaded == [
        ("internal_state", StatePlugin),
        ("internal_metrics", MetricsPlugin),
    ]
def test_not_equal(redis_pool, redis_server_url):
    """Test the __eq__ method"""
    # A pool with a different transport config must not compare equal
    other_pool = TransportPool(
        transport_class=RedisEventTransport,
        transport_config=RedisEventTransport.Config(url=redis_server_url, service_name="123"),
        config=Config.default(),
    )
    assert redis_pool != other_pool
def test_hash_equal(dummy_pool):
    """Test the __hash__ method"""
    # An identically-configured pool must hash to the same value
    equivalent_pool = TransportPool(
        transport_class=DebugEventTransport,
        transport_config=DebugEventTransport.Config(),
        config=Config.default(),
    )
    assert hash(dummy_pool) == hash(equivalent_pool)
async def dummy_pool():
    """Yield a TransportPool backed by the debug event transport, closing it afterwards."""
    debug_pool = TransportPool(
        transport_class=DebugEventTransport,
        transport_config=DebugEventTransport.Config(),
        config=Config.default(),
    )
    yield debug_pool
    await debug_pool.close()
def test_plugin_selector_custom_config():
    """A custom plugin setting should override the default value."""
    config = Config.load_dict({"plugins": {"internal_state": {"ping_interval": 123}}})
    assert config.plugin("internal_state").ping_interval == 123
async def redis_pool(redis_server_url):
    """Yield a TransportPool backed by the redis event transport, closing it afterwards."""
    event_pool = TransportPool(
        transport_class=RedisEventTransport,
        transport_config=RedisEventTransport.Config(url=redis_server_url),
        config=Config.default(),
    )
    yield event_pool
    await event_pool.close()
def test_load_bus_config_url_yaml(mock_urlopen, tmp_directory: Path):
    """YAML config fetched over HTTP (via Content-Type header) should load correctly."""
    fake_response = MagicMock()
    fake_response.getcode.return_value = 200
    fake_response.read.return_value = EXAMPLE_VALID_YAML
    fake_response.headers = {"Content-Type": "application/yaml"}
    mock_urlopen.return_value = fake_response

    config = Config.load_file("http://999.999.999.999/config")
    assert config.bus().log_level == LogLevelEnum.WARNING
def load_config(
    from_file: str = None, service_name: str = None, process_name: str = None
) -> Config:
    """Load the lightbus config from a file (argument or LIGHTBUS_CONFIG env var),
    falling back to the default config. Optionally override service/process names.
    """
    config_path = from_file or os.environ.get("LIGHTBUS_CONFIG")

    if config_path:
        logger.info(f"Loading config from {config_path}")
        config = Config.load_file(file_path=config_path)
    else:
        logger.info("No config file specified, will use default config")
        config = Config.load_dict({})

    # Explicit names take precedence over whatever the config file specified
    if service_name:
        config._config.set_service_name(service_name)
    if process_name:
        config._config.set_process_name(process_name)

    return config
def test_load_bus_config_url_json_no_headers(mock_urlopen, tmp_directory: Path):
    """Without a Content-Type header, the format is inferred from the .json URL suffix."""
    fake_response = MagicMock()
    fake_response.getcode.return_value = 200
    fake_response.read.return_value = EXAMPLE_VALID_JSON
    mock_urlopen.return_value = fake_response

    config = Config.load_file("http://999.999.999.999/config.json")
    # Still works because the URL ends in .json
    assert config.bus().log_level == LogLevelEnum.WARNING
def test_from_config():
    """from_config() should build the configured child transport with the given URL."""
    transport = TransactionalEventTransport.from_config(
        config=Config.load_dict({}),
        child_transport={"redis": {"url": "redis://foo/1"}},
        database_class="lightbus.transports.transactional.DbApiConnection",
    )

    child = transport.child_transport
    assert isinstance(child, RedisEventTransport)
    assert child.connection_parameters["address"] == "redis://foo/1"
def parse_args(args=None):
    """Build the lightbus argument parser, run the before_parse_args plugin hook,
    and parse the given arguments (defaulting to ``sys.argv[1:]``).

    Args:
        args: Optional list of argument strings; ``None`` means use the CLI args.

    Returns:
        The parsed ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser(description="Lightbus management command.")
    parser.add_argument(
        "--service-name",
        "-s",
        help="Name of service in which this process resides. YOU SHOULD "
        "LIKELY SET THIS IN PRODUCTION. Can also be set using the "
        "LIGHTBUS_SERVICE_NAME environment. Will default to a random string.",
    )
    parser.add_argument(
        "--process-name",
        "-p",
        help="A unique name of this process within the service. Can also be set using the "
        "LIGHTBUS_PROCESS_NAME environment. Will default to a random string.",
    )
    parser.add_argument(
        "--config", dest="config_file", help="Config file to load, JSON or YAML", metavar="FILE"
    )
    parser.add_argument(
        "--log-level",
        help="Set the log level. Overrides any value set in config. "
        "One of debug, info, warning, critical, exception.",
        metavar="LOG_LEVEL",
    )
    subparsers = parser.add_subparsers(help="Commands", dest="subcommand")
    subparsers.required = True

    # Allow each command to set up its own arguments.
    # Fix: dump_schema.Command().setup() was called twice in the original,
    # registering the same subcommand twice; the duplicate has been removed.
    lightbus.commands.run.Command().setup(parser, subparsers)
    lightbus.commands.shell.Command().setup(parser, subparsers)
    lightbus.commands.dump_schema.Command().setup(parser, subparsers)
    lightbus.commands.dump_config_schema.Command().setup(parser, subparsers)
    lightbus.commands.inspect.Command().setup(parser, subparsers)

    # Create a temporary plugin registry in order to run the before_parse_args hook
    plugin_registry = PluginRegistry()
    plugin_registry.autoload_plugins(config=Config.load_dict({}))
    block(
        plugin_registry.execute_hook("before_parse_args", parser=parser, subparsers=subparsers),
        timeout=5,
    )
    args = parser.parse_args(sys.argv[1:] if args is None else args)
    # Note that we don't have an after_parse_args plugin hook. Instead we use the receive_args
    # hook which is called once we have instantiated our plugins
    return args
def redis_other_config():
    """Config with redis transports configured on the 'other' api only."""
    transport_keys = ("rpc_transport", "result_transport", "event_transport")
    return Config.load_dict(
        {
            "apis": {
                # Each transport gets its own fresh empty redis config dict
                "other": {key: {"redis": {}} for key in transport_keys}
            }
        }
    )
def test_plugin_enabled():
    """Explicitly enabling both built-in plugins should yield loaded plugins."""
    config = Config.load_dict(
        {
            "plugins": {
                "internal_state": {"enabled": True},
                "internal_metrics": {"enabled": True},
            }
        }
    )
    loaded_plugins = autoload_plugins(config)
    assert loaded_plugins
def redis_default_config():
    """Config with redis transports configured on the default api and bus schema."""
    transport_keys = ("rpc_transport", "result_transport", "event_transport")
    return Config.load_dict(
        {
            "bus": {"schema": {"transport": {"redis": {}}}},
            "apis": {
                # Each transport gets its own fresh empty redis config dict
                "default": {key: {"redis": {}} for key in transport_keys}
            },
        }
    )
def test_plugin_enabled(plugin_registry: PluginRegistry):
    """Explicitly enabling both built-in plugins should populate the registry."""
    config = Config.load_dict(
        {
            "plugins": {
                "internal_state": {"enabled": True},
                "internal_metrics": {"enabled": True},
            }
        }
    )
    plugin_registry.autoload_plugins(config)
    assert plugin_registry._plugins
async def test_no_transport():
    """An RPC call to an api with no configured rpc transport should raise TransportNotFound."""
    # Only an event transport is configured, and only for 'default'; no defaults applied
    config = Config.load_dict(
        {"apis": {"default": {"event_transport": {"redis": {}}}}},
        set_defaults=False,
    )
    client = lightbus.BusClient(config=config)

    with pytest.raises(TransportNotFound):
        await client.call_rpc_remote("my_api", "test", kwargs={}, options={})