def setUp(self):
    """Create a running publisher and a subscriber wired to the same port."""
    shared_clock = LiveClock()
    shared_guid_factory = LiveGuidFactory()
    shared_logger = LiveLogger()

    self.zmq_context = zmq.Context()
    self.response_handler = ObjectStorer()

    self.subscriber = MessageSubscriber(
        ClientId("Subscriber-001"),
        LOCALHOST,
        TEST_PUB_PORT,
        CompressorBypass(),
        EncryptionSettings(),
        shared_clock,
        shared_guid_factory,
        LoggerAdapter('MessageSubscriber', shared_logger),
    )

    self.publisher = MessagePublisher(
        ServerId("Publisher-001"),
        TEST_PUB_PORT,
        CompressorBypass(),
        EncryptionSettings(),
        shared_clock,
        shared_guid_factory,
        LoggerAdapter('MessagePublisher', shared_logger),
    )
    self.publisher.start()
def test_can_log_critical_messages_to_console(self):
    # Arrange: console output filtered to CRITICAL and above.
    test_logger = TestLogger(clock=TestClock(), level_console=LogLevel.CRITICAL)
    adapter = LoggerAdapter("TEST_LOGGER", test_logger)

    # Act
    adapter.critical("This is a log message.")

    # Assert: reaching this point means logging raised no errors.
    self.assertTrue(True)
def test_can_log_warning_messages_to_console(self):
    # Arrange: console output filtered to WARNING and above.
    test_logger = TestLogger(clock=TestClock(), level_console=LogLevel.WARNING)
    adapter = LoggerAdapter("TEST_LOGGER", test_logger)

    # Act
    adapter.warning("This is a log message.")

    # Assert: reaching this point means logging raised no errors.
    self.assertTrue(True)
def test_log_verbose_messages_to_console(self):
    # Arrange: console output at the most detailed (VERBOSE) level.
    test_logger = TestLogger(clock=TestClock(), level_console=LogLevel.VERBOSE)
    adapter = LoggerAdapter("TEST_LOGGER", test_logger)

    # Act
    adapter.verbose("This is a log message.")

    # Assert: reaching this point means logging raised no errors.
    self.assertTrue(True)
def test_can_log_error_messages_to_console(self):
    # Arrange: pass an explicit TestClock for consistency with the other
    # console-logging tests (previously this relied on TestLogger's default
    # clock, making the test's timestamps non-deterministic relative to its
    # siblings).
    logger = TestLogger(clock=TestClock(), level_console=LogLevel.ERROR)
    logger_adapter = LoggerAdapter("TEST_LOGGER", logger)

    # Act
    logger_adapter.error("This is a log message.")

    # Assert
    self.assertTrue(True)  # Does not raise errors.
def test_log_info_messages_to_console(self):
    # Arrange: stdout logging enabled at INFO level.
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO)
    adapter = LoggerAdapter(component="TEST_LOGGER", logger=stdout_logger)

    # Act
    adapter.info("This is a log message.")

    # Assert
    assert True  # No exceptions raised
def test_log_info_messages_to_console(self):
    # Arrange: console logging enabled at INFO level.
    test_logger = TestLogger(clock=TestClock(), level_console=LogLevel.INFO)
    adapter = LoggerAdapter("TEST_LOGGER", test_logger)

    # Act
    adapter.info("This is a log message.")

    # Assert
    self.assertTrue(True)  # No exception raised
def test_log_critical_messages_to_console(self):
    # Arrange: stdout logging restricted to CRITICAL.
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL)
    adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=stdout_logger)

    # Act
    adapter.critical("This is a log message.")

    # Assert
    assert True  # No exceptions raised
def test_log_warning_messages_to_console(self):
    # Arrange: stdout logging restricted to WARNING and above.
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.WARNING)
    adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=stdout_logger)

    # Act
    adapter.warning("This is a log message.")

    # Assert
    assert True  # No exceptions raised
def test_log_info_messages_to_console_with_green_colour(self):
    # Arrange
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO)
    adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=stdout_logger)

    # Act: request an explicit colour for the record.
    adapter.info("This is a log message.", color=LogColor.GREEN)

    # Assert
    assert True  # No exceptions raised
def test_stop_when_running_in_process(self):
    # Arrange: a logger running in a separate process.
    live_logger = LiveLogger(clock=LiveClock(), run_in_process=True)
    adapter = LoggerAdapter("LIVE_LOGGER", live_logger)
    adapter.info("A log message.")

    # Act
    live_logger.stop()

    # Assert
    self.assertTrue(True)  # No exception raised
def setup(self):
    # Fresh isolated loop testing pattern: each test gets its own loop.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    self.loop = loop

    live_logger = LiveLogger(loop=loop, clock=LiveClock())
    self.logger = live_logger
    self.logger_adapter = LoggerAdapter(component="LIVER_LOGGER", logger=live_logger)
def test_log_info_with_annotation_sends_to_stdout(self):
    # Arrange
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.INFO)
    adapter = LoggerAdapter(component="TEST_LOGGER", logger=stdout_logger)

    # Act: attach arbitrary annotations to the record.
    adapter.info("This is a log message.", annotations={"my_tag": "something"})

    # Assert
    assert True  # No exceptions raised
def test_log_exception_messages_to_console(self):
    # Arrange
    stdout_logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL)
    adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=stdout_logger)

    # Act: log an exception object alongside the message.
    error = ZeroDivisionError("Oops")
    adapter.exception("We intentionally divided by zero!", error)

    # Assert
    assert True  # No exceptions raised
def setup(self):
    # Fixture Setup: shared event loop with debug checks enabled.
    loop = asyncio.get_event_loop()
    loop.set_debug(True)
    self.loop = loop

    self.logger = LiveLogger(
        loop=loop,
        clock=LiveClock(),
        level_stdout=LogLevel.DEBUG,
    )
    self.logger_adapter = LoggerAdapter(component_name="LIVER_LOGGER", logger=self.logger)
def __init__(
    self,
    client: BetfairClient,
    logger: Logger,
    market_filter: Optional[Dict] = None,
):
    """
    Initialize the Betfair instrument provider.

    Parameters
    ----------
    client : BetfairClient
        The Betfair HTTP client for the provider.
    logger : Logger
        The logger for the provider.
    market_filter : dict, optional
        The market filter used when loading instruments.
    """
    super().__init__()
    self.venue = BETFAIR_VENUE
    self.market_filter = market_filter or {}

    self._client = client
    self._log = LoggerAdapter("BetfairInstrumentProvider", logger)

    # Internal state for loaded / unresolved instruments
    self._cache: Dict[InstrumentId, BettingInstrument] = {}
    self._account_currency = None
    self._missing_instruments: Set[BettingInstrument] = set()
def get_cached_betfair_instrument_provider(
    client: BetfairClient,
    logger: Logger,
    market_filter: tuple,
) -> BetfairInstrumentProvider:
    """
    Cache and return a BetfairInstrumentProvider.

    If a cached provider already exists, then that cached provider will be returned.

    Parameters
    ----------
    client : BetfairClient
        The client for the instrument provider.
    logger : Logger
        The logger for the instrument provider.
    market_filter : tuple
        The market filter to load into the instrument provider.
        Passed as a tuple (hashable) and converted to a dict here.

    Returns
    -------
    BetfairInstrumentProvider

    """
    # NOTE(review): no cache lookup is visible in this body — the caching
    # described above is presumably provided by a decorator outside this
    # view; confirm at the definition site.
    LoggerAdapter(
        "BetfairFactory", logger).warning("Creating new instance of BetfairInstrumentProvider")
    return BetfairInstrumentProvider(client=client, logger=logger, market_filter=dict(market_filter))
def get_cached_betfair_client(
    username: Optional[str],
    password: Optional[str],
    app_key: Optional[str],
    cert_dir: Optional[str],
    loop: asyncio.AbstractEventLoop,
    logger: Logger,
) -> BetfairClient:
    """
    Cache and return a Betfair HTTP client with the given credentials.

    If a cached client with matching credentials already exists, then that
    cached client will be returned.

    Parameters
    ----------
    username : str, optional
        The API username for the client. If None then will source from the
        `BETFAIR_USERNAME` env var.
    password : str, optional
        The API password for the client. If None then will source from the
        `BETFAIR_PASSWORD` env var.
    app_key : str, optional
        The API application key for the client. If None then will source from
        the `BETFAIR_APP_KEY` env var.
    cert_dir : str, optional
        The API SSL certificate directory for the client. If None then will
        source from the `BETFAIR_CERT_DIR` env var.
    loop : asyncio.AbstractEventLoop
        The event loop for the client.
    logger : Logger
        The logger for the client.

    Returns
    -------
    BetfairClient

    """
    global CLIENTS

    # Any credential not supplied is sourced from the environment
    # (raises KeyError if the env var is also missing).
    username = username or os.environ["BETFAIR_USERNAME"]
    password = password or os.environ["BETFAIR_PASSWORD"]
    app_key = app_key or os.environ["BETFAIR_APP_KEY"]
    cert_dir = cert_dir or os.environ["BETFAIR_CERT_DIR"]

    # One cached client per unique credential set.
    cache_key: str = "|".join((username, password, app_key, cert_dir))
    if cache_key not in CLIENTS:
        LoggerAdapter("BetfairFactory", logger).warning(
            "Creating new instance of BetfairClient",
        )
        CLIENTS[cache_key] = BetfairClient(
            username=username,
            password=password,
            app_key=app_key,
            cert_dir=cert_dir,
            loop=loop,
            logger=logger,
        )
    return CLIENTS[cache_key]
def __init__(
    self,
    client: BinanceHttpClient,
    logger: Logger,
):
    """
    Initialize the Binance instrument provider.

    Parameters
    ----------
    client : BinanceHttpClient
        The Binance HTTP client for the provider.
    logger : Logger
        The logger for the provider.
    """
    super().__init__()
    self.venue = BINANCE_VENUE
    self._client = client
    self._log = LoggerAdapter(type(self).__name__, logger)

    # HTTP API endpoints backed by the shared client
    self._wallet = BinanceWalletHttpAPI(client)
    self._spot_market = BinanceSpotMarketHttpAPI(client)

    # Async loading flags
    self._loaded = False
    self._loading = False
def __init__(self, client: BetfairClient, logger: Logger, message_handler: Callable, **kwargs):
    """Initialize the Betfair market stream client."""
    # Set before base-class init so anything it triggers can read it.
    self.subscription_message = None

    adapter = LoggerAdapter("BetfairMarketStreamClient", logger)
    super().__init__(
        client=client,
        logger_adapter=adapter,
        message_handler=message_handler,
        **kwargs,
    )
def setUp(self):
    # Fixture Setup: a running MessageServer and a MessageClient connected
    # to it over the test recv/send ports, with sinks capturing traffic on
    # both sides.
    clock = LiveClock()
    guid_factory = LiveGuidFactory()
    logger = LiveLogger()
    self.context = zmq.Context()
    self.client_sink = []  # Captures messages received by the client
    self.server_sink = []  # Captures string messages received by the server
    self.server = MessageServer(
        ServerId("Server-001"),
        TEST_RECV_PORT,
        TEST_SEND_PORT,
        MsgPackDictionarySerializer(),
        MsgPackRequestSerializer(),
        MsgPackResponseSerializer(),
        CompressorBypass(),
        EncryptionSettings(),
        clock,
        guid_factory,
        LoggerAdapter('MessageServer', logger))

    # Register test handlers
    # STRING handler is registered before start; COMMAND after — keep this
    # order, the tests depend on the fixture behaving this way.
    self.server.register_handler(MessageType.STRING, self.server_sink.append)
    self.server.start()

    self.command_serializer = MsgPackCommandSerializer()
    self.server.register_handler(MessageType.COMMAND, self.command_handler)

    self.client = MessageClient(
        ClientId("Trader-001"),
        LOCALHOST,
        TEST_RECV_PORT,
        TEST_SEND_PORT,
        MsgPackDictionarySerializer(),
        MsgPackRequestSerializer(),
        MsgPackResponseSerializer(),
        CompressorBypass(),
        EncryptionSettings(),
        clock,
        guid_factory,
        LoggerAdapter('MessageClient', logger))
    self.client.register_handler(self.client_sink.append)
def test_advance_time(self):
    # Profile advancing a test clock that has a 1-second timer registered.
    logger = LoggerAdapter('TestClock', TestLogger(level_console=LogLevel.DEBUG))
    store = []
    # NOTE(review): `clock` is not defined in this method — presumably a
    # module- or class-level fixture; confirm it exists and is reset
    # between runs, otherwise this raises NameError.
    clock.register_logger(logger)
    clock.set_timer(Label('test'), timedelta(seconds=1), handler=store.append)

    iterations = 1
    result = PerformanceHarness.profile_function(TestClockTests.advance_time, 1, iterations)
    # ~1036ms (1036473μs) minimum of 1 runs @ 1000000 iterations each run.
    self.assertTrue(result < 1.5)
def test_register_sink_sends_records_to_sink(self):
    # Arrange
    captured = []
    logger = Logger(clock=TestClock(), level_stdout=LogLevel.CRITICAL)
    adapter = LoggerAdapter(component_name="TEST_LOGGER", logger=logger)

    # Act
    logger.register_sink(captured.append)
    adapter.info("A log event", annotations={"tag": "risk"})

    # Assert: the sink receives the fully-populated record dict,
    # including the flattened annotation.
    expected = {
        "component": "TEST_LOGGER",
        "machine_id": socket.gethostname(),
        "level": "INF",
        "msg": "A log event",
        "instance_id": f"{logger.instance_id.value}",
        "tag": "risk",
        "timestamp": 0,
        "trader_id": "TRADER-000",
    }
    assert captured[0] == expected
async def test_socket_base(socket_server, logger, event_loop):
    """Connect to the test socket server and collect the greeting messages."""
    received = []

    def on_raw(raw):
        # Stop the client once more than five messages have arrived.
        received.append(raw)
        if len(received) > 5:
            client.stop()

    host, port = socket_server.server_address
    client = SocketClient(
        host=host,
        port=port,
        message_handler=on_raw,
        loop=event_loop,
        logger_adapter=LoggerAdapter("Socket", logger),
        ssl=False,
    )
    await client.start()
    assert received == [b"hello"] * 6
def __init__(
    self,
    client: BetfairClient,
    logger: Logger,
    message_handler,
    partition_matched_by_strategy_ref=True,
    include_overall_position=None,
    customer_strategy_refs=None,
    **kwargs,
):
    """Initialize the Betfair order stream client."""
    adapter = LoggerAdapter("BetfairOrderStreamClient", logger)
    super().__init__(
        client=client,
        logger_adapter=adapter,
        message_handler=message_handler,
        **kwargs,
    )
    # Filter sent with the order subscription request.
    self.order_filter = dict(
        includeOverallPosition=include_overall_position,
        customerStrategyRefs=customer_strategy_refs,
        partitionMatchedByStrategyRef=partition_matched_by_strategy_ref,
    )
def __init__(
    self,
    client: APIClient,
    logger: Logger,
    message_handler,
    partition_matched_by_strategy_ref=True,
    include_overall_position=None,
    customer_strategy_refs=None,
    **kwargs,
):
    """Initialize the order stream client."""
    adapter = LoggerAdapter("BetfairOrderStreamClient", logger)
    super().__init__(
        client=client,
        logger_adapter=adapter,
        message_handler=message_handler,
        **kwargs,
    )
    # Build the subscription filter via the streaming helper.
    self.order_filter = streaming_order_filter(
        include_overall_position=include_overall_position,
        customer_strategy_refs=customer_strategy_refs,
        partition_matched_by_strategy_ref=partition_matched_by_strategy_ref,
    )
def __init__(
    self,
    client: BetfairClient,
    logger_adapter: LoggerAdapter,
    message_handler,
    loop=None,
    host=None,
    port=None,
    crlf=None,
    encoding=None,
):
    """Initialize the stream client, applying module defaults for unset options."""
    # Resolve each optional setting to its module-level default.
    resolved_loop = loop or asyncio.get_event_loop()
    resolved_host = host or HOST
    resolved_port = port or PORT
    resolved_crlf = crlf or CRLF
    resolved_encoding = encoding or ENCODING

    super().__init__(
        loop=resolved_loop,
        logger=logger_adapter.get_logger(),
        host=resolved_host,
        port=resolved_port,
        handler=message_handler,
        crlf=resolved_crlf,
        encoding=resolved_encoding,
    )
    self.client = client
    self.unique_id = self.new_unique_id()
def setup(self):
    # Fixture Setup: debug event loop, identifiers, logging, and message bus.
    loop = asyncio.get_event_loop()
    loop.set_debug(True)
    self.loop = loop

    self.clock = LiveClock()
    self.uuid_factory = UUIDFactory()
    self.trader_id = TestIdStubs.trader_id()
    self.venue = BETFAIR_VENUE

    # Setup logging
    self.logger = LiveLogger(loop=loop, clock=self.clock, level_stdout=LogLevel.DEBUG)
    self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger)

    self.msgbus = MessageBus(
        trader_id=self.trader_id,
        clock=self.clock,
        logger=self.logger,
    )
    self.cache = TestComponentStubs.cache()
async def run_test():
    # Arrange: a logger whose internal queue holds only one record, so
    # subsequent puts block until the queue is drained.
    logger = LiveLogger(
        loop=self.loop,
        clock=LiveClock(),
        maxsize=1,
    )
    logger_adapter = LoggerAdapter(component="LIVE_LOGGER", logger=logger)
    logger.start()

    # Act
    logger_adapter.info("A log message.")
    logger_adapter.info("A log message.")  # <-- blocks
    logger_adapter.info("A log message.")  # <-- blocks
    logger_adapter.info("A log message.")  # <-- blocks
    await asyncio.sleep(0.1)  # <-- processes all log messages

    # Fix: previously stopped and asserted on `self.logger` (the fixture
    # logger) — the local `logger` under test was left running and the
    # assertion never checked the object this test constructed.
    logger.stop()

    # Assert
    assert not logger.is_running
def __init__(
    self,
    strategies: List[TradingStrategy],
    config: Dict[str, object],
):
    """
    Initialize a new instance of the TradingNode class.

    Parameters
    ----------
    strategies : list[TradingStrategy]
        The list of strategies to run on the trading node.
    config : dict[str, object]
        The configuration for the trading node.

    Raises
    ------
    ValueError
        If strategies is None or empty.
    ValueError
        If config is None or empty.

    """
    PyCondition.not_none(strategies, "strategies")
    PyCondition.not_none(config, "config")
    PyCondition.not_empty(strategies, "strategies")
    PyCondition.not_empty(config, "config")

    # Extract configs
    config_trader = config.get("trader", {})
    config_log = config.get("logging", {})
    config_exec_db = config.get("exec_database", {})
    config_strategy = config.get("strategy", {})
    config_adapters = config.get("adapters", {})

    # Event loop, executor and clock — the clock is bound to this loop.
    self._uuid_factory = UUIDFactory()
    self._loop = asyncio.get_event_loop()
    self._executor = concurrent.futures.ThreadPoolExecutor()
    self._loop.set_default_executor(self._executor)
    self._clock = LiveClock(loop=self._loop)

    self.created_time = self._clock.utc_now()
    self._is_running = False

    # Uncomment for debugging
    # self._loop.set_debug(True)

    # Setup identifiers
    self.trader_id = TraderId(
        name=config_trader["name"],
        tag=config_trader["id_tag"],
    )

    # Setup logging — levels and file options sourced from config.
    self._logger = LiveLogger(
        clock=self._clock,
        name=self.trader_id.value,
        level_console=LogLevelParser.from_str_py(config_log.get("log_level_console")),
        level_file=LogLevelParser.from_str_py(config_log.get("log_level_file")),
        level_store=LogLevelParser.from_str_py(config_log.get("log_level_store")),
        run_in_process=config_log.get("run_in_process", True),  # Run logger in a separate process
        log_thread=config_log.get("log_thread_id", False),
        log_to_file=config_log.get("log_to_file", False),
        log_file_path=config_log.get("log_file_path", ""),
    )

    self._log = LoggerAdapter(component_name=self.__class__.__name__, logger=self._logger)
    self._log_header()
    self._log.info("Building...")

    self._setup_loop()  # Requires the logger to be initialized

    # Portfolio and data engine wired to the shared clock/logger.
    self.portfolio = Portfolio(
        clock=self._clock,
        logger=self._logger,
    )

    self._data_engine = LiveDataEngine(
        loop=self._loop,
        portfolio=self.portfolio,
        clock=self._clock,
        logger=self._logger,
        config={"qsize": 10000},
    )

    self.portfolio.register_cache(self._data_engine.cache)
    self.analyzer = PerformanceAnalyzer()

    # Execution database: Redis when configured, otherwise in-memory bypass.
    if config_exec_db["type"] == "redis":
        exec_db = RedisExecutionDatabase(
            trader_id=self.trader_id,
            logger=self._logger,
            command_serializer=MsgPackCommandSerializer(),
            event_serializer=MsgPackEventSerializer(),
            config={
                "host": config_exec_db["host"],
                "port": config_exec_db["port"],
            }
        )
    else:
        exec_db = BypassExecutionDatabase(
            trader_id=self.trader_id,
            logger=self._logger,
        )

    self._exec_engine = LiveExecutionEngine(
        loop=self._loop,
        database=exec_db,
        portfolio=self.portfolio,
        clock=self._clock,
        logger=self._logger,
        config={"qsize": 10000},
    )

    self._exec_engine.load_cache()
    self._setup_adapters(config_adapters, self._logger)

    # Trader owns the strategies and the engines built above.
    self.trader = Trader(
        trader_id=self.trader_id,
        strategies=strategies,
        portfolio=self.portfolio,
        data_engine=self._data_engine,
        exec_engine=self._exec_engine,
        clock=self._clock,
        logger=self._logger,
    )

    self._check_residuals_delay = config_trader.get("check_residuals_delay", 5.0)
    self._load_strategy_state = config_strategy.get("load_state", True)
    self._save_strategy_state = config_strategy.get("save_state", True)

    if self._load_strategy_state:
        self.trader.load()

    self._log.info("state=INITIALIZED.")
    self.time_to_initialize = self._clock.delta(self.created_time)
    self._log.info(f"Initialized in {self.time_to_initialize.total_seconds():.3f}s.")