async def run_test():
    # Arrange
    logger = LiveLogger(
        loop=self.loop,
        clock=LiveClock(),
        maxsize=5,
    )

    logger_adapter = LoggerAdapter(component="LIVE_LOGGER",
                                   logger=logger)
    logger.start()

    # Act
    logger_adapter.info("A log message.")
    logger_adapter.info("A log message.")  # <-- blocks
    logger_adapter.info("A different log message.")  # <-- blocks
    logger_adapter.info("A log message.")  # <-- blocks
    logger_adapter.info("A different log message.")  # <-- blocks
    logger_adapter.info("A log message.")  # <-- blocks

    await asyncio.sleep(0.1)  # <-- processes all log messages
    logger.stop()

    # Assert
    assert not logger.is_running
def instrument_list(mock_load_markets_metadata,
                    loop: asyncio.AbstractEventLoop):
    """Prefill `INSTRUMENTS` cache for tests"""
    global INSTRUMENTS

    # Setup
    logger = LiveLogger(loop=loop,
                        clock=LiveClock(),
                        level_stdout=LogLevel.ERROR)
    client = BetfairTestStubs.betfair_client(loop=loop, logger=logger)
    logger = LiveLogger(loop=loop,
                        clock=LiveClock(),
                        level_stdout=LogLevel.DEBUG)
    instrument_provider = BetfairInstrumentProvider(client=client,
                                                    logger=logger,
                                                    market_filter={})

    # Load instruments
    market_ids = BetfairDataProvider.market_ids()
    catalog = {
        r["marketId"]: r
        for r in BetfairResponses.betting_list_market_catalogue()["result"]
        if r["marketId"] in market_ids
    }
    mock_load_markets_metadata.return_value = catalog
    t = loop.create_task(
        instrument_provider.load_all_async(
            market_filter={"market_id": market_ids}))
    loop.run_until_complete(t)

    # Fill INSTRUMENTS global cache
    INSTRUMENTS.extend(instrument_provider.list_all())
    assert INSTRUMENTS
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER-001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        self.executor = concurrent.futures.ThreadPoolExecutor()
        self.loop.set_default_executor(self.executor)
        self.loop.set_debug(True)

        # Setup logging
        logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
            trader_id=self.trader_id,
            level_stdout=LogLevel.DEBUG,
        )

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.mock_oanda = MagicMock()

        self.client = OandaDataClient(
            client=self.mock_oanda,
            account_id="001",
            engine=self.data_engine,
            clock=self.clock,
            logger=logger,
        )

        self.data_engine.register_client(self.client)

        with open(TEST_PATH + "instruments.json") as response:
            instruments = json.load(response)

        self.mock_oanda.request.return_value = instruments
    def test_stop_when_running_in_process(self):
        # Arrange
        logger = LiveLogger(clock=LiveClock(), run_in_process=True)
        logger_adapter = LoggerAdapter("LIVE_LOGGER", logger)

        logger_adapter.info("A log message.")

        # Act
        logger.stop()

        # Assert
        self.assertTrue(True)  # No exception raised
Example #5
    def setup(self):
        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=LiveClock(),
        )

        self.logger_adapter = LoggerAdapter(component="LIVE_LOGGER",
                                            logger=self.logger)
Example #6
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=LiveClock(),
            level_stdout=LogLevel.DEBUG,
        )

        self.logger_adapter = LoggerAdapter(component_name="LIVE_LOGGER",
                                            logger=self.logger)
Example #7
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        database = BypassExecutionDatabase(trader_id=self.trader_id, logger=self.logger)
        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            database=database,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )
Example #8
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
            level_stdout=LogLevel.DEBUG,
        )

        self.trader_id = TestStubs.trader_id()
        self.strategy_id = TestStubs.strategy_id()
        self.account_id = TestStubs.account_id()

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache_db = MockCacheDatabase(logger=self.logger)

        self.cache = Cache(
            database=self.cache_db,
            logger=self.logger,
        )
Example #9
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(loop=self.loop, clock=self.clock)
        self.client = BetfairTestStubs.betfair_client(loop=self.loop,
                                                      logger=self.logger)
async def test_binance_websocket_client():
    loop = asyncio.get_event_loop()
    clock = LiveClock()

    client = get_cached_binance_http_client(
        loop=loop,
        clock=clock,
        logger=Logger(clock=clock),
        key=os.getenv("BINANCE_API_KEY"),
        secret=os.getenv("BINANCE_API_SECRET"),
    )
    await client.connect()

    user = BinanceUserDataHttpAPI(client=client)
    response = await user.create_listen_key_spot()
    key = response["listenKey"]

    ws = BinanceUserDataWebSocket(
        loop=loop,
        clock=clock,
        logger=LiveLogger(loop=loop, clock=clock),
        handler=print,
    )

    ws.subscribe(key=key)

    await ws.connect(start=True)
    await asyncio.sleep(4)
    await ws.close()
    await client.disconnect()
Example #11
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        # Setup logging
        logger = LiveLogger(
            clock=self.clock,
            name=self.trader_id.value,
            level_console=LogLevel.INFO,
            level_file=LogLevel.DEBUG,
            level_store=LogLevel.WARNING,
        )

        self.logger = LiveLogger(self.clock)

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        self.mock_binance_rest = MagicMock()
        self.mock_binance_rest.name = "Binance"

        self.mock_binance_feed = MagicMock()

        self.client = BinanceDataClient(
            client_rest=self.mock_binance_rest,
            client_feed=self.mock_binance_feed,
            engine=self.data_engine,
            clock=self.clock,
            logger=logger,
        )

        self.data_engine.register_client(self.client)
Example #12
    def setup(self):
        os.environ.update(
            {
                "TWS_USERNAME": "******",
                "TWS_PASSWORD": "******",
            }
        )
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
            level_stdout=LogLevel.DEBUG,
        )

        self.trader_id = TestIdStubs.trader_id()
        self.strategy_id = TestIdStubs.strategy_id()
        self.account_id = TestIdStubs.account_id()

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache_db = MockCacheDatabase(
            logger=self.logger,
        )

        self.cache = Cache(
            database=self.cache_db,
            logger=self.logger,
        )
        with patch("nautilus_trader.adapters.interactive_brokers.factories.get_cached_ib_client"):
            self.data_client = InteractiveBrokersLiveDataClientFactory.create(
                loop=self.loop,
                name="IB",
                config=InteractiveBrokersDataClientConfig(  # noqa: S106
                    username="******", password="******"
                ),
                msgbus=self.msgbus,
                cache=self.cache,
                clock=self.clock,
                logger=self.logger,
            )
        with patch("nautilus_trader.adapters.interactive_brokers.factories.get_cached_ib_client"):
            self.exec_client = InteractiveBrokersLiveExecClientFactory.create(
                loop=self.loop,
                name="IB",
                config=InteractiveBrokersExecClientConfig(  # noqa: S106
                    username="******", password="******"
                ),
                msgbus=self.msgbus,
                cache=self.cache,
                clock=self.clock,
                logger=self.logger,
            )
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(loop=self.loop, clock=self.clock)
        self.instrument = BetfairTestStubs.betting_instrument()
        self.client = BetfairTestStubs.betfair_client(loop=self.loop,
                                                      logger=self.logger)
        self.provider = BetfairTestStubs.instrument_provider(self.client)
        self.uuid = UUID4()

    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(loop=self.loop, clock=self.clock)
        self.client = BetfairTestStubs.betfair_client(loop=self.loop,
                                                      logger=self.logger)
        self.provider = BetfairInstrumentProvider(
            client=self.client,
            logger=TestComponentStubs.logger(),
        )

    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(loop=self.loop, clock=self.clock)
        self.client = BetfairTestStubs.betfair_client(loop=self.loop,
                                                      logger=self.logger)
        self.provider = BetfairInstrumentProvider(
            client=self.client,
            logger=BetfairTestStubs.live_logger(BetfairTestStubs.clock()),
            market_filter=None,
        )
Example #16
    async def test_log_when_queue_over_maxsize_blocks(self):
        # Arrange
        logger = LiveLogger(
            loop=self.loop,
            clock=LiveClock(),
            maxsize=5,
        )

        logger_adapter = LoggerAdapter(component_name="LIVE_LOGGER",
                                       logger=logger)
        logger.start()

        # Act
        logger_adapter.info("A log message.")
        logger_adapter.info("A log message.")  # <-- blocks
        logger_adapter.info("A different log message.")  # <-- blocks
        logger_adapter.info("A log message.")  # <-- blocks
        logger_adapter.info("A different log message.")  # <-- blocks
        logger_adapter.info("A log message.")  # <-- blocks

        await asyncio.sleep(0.3)  # <-- processes all log messages
        logger.stop()
        await asyncio.sleep(0.3)

        # Assert
        assert not logger.is_running
Example #17
    def setup(self):
        self.ib = MagicMock()
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
            level_stdout=LogLevel.DEBUG,
        )
        self.provider = InteractiveBrokersInstrumentProvider(
            client=self.ib,
            logger=self.logger,
            config=InstrumentProviderConfig())
Example #18
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.clock = LiveClock()
        self.logger = LiveLogger(loop=self.loop, clock=self.clock)
        self.client = BetfairClient(  # noqa: S106 (no hardcoded password)
            username="******",
            password="******",
            app_key="app_key",
            cert_dir="/certs",
            loop=self.loop,
            logger=self.logger,
            ssl=True,
        )
        self.client.session_token = "xxxsessionToken="
async def test_binance_websocket_client():
    loop = asyncio.get_event_loop()
    clock = LiveClock()

    client = BinanceSpotWebSocket(
        loop=loop,
        clock=clock,
        logger=LiveLogger(loop=loop, clock=clock),
        handler=print,
    )

    client.subscribe_ticker()

    await client.connect(start=True)
    await asyncio.sleep(4)
    await client.close()
async def test_binance_websocket_client():
    loop = asyncio.get_event_loop()
    clock = LiveClock()

    client = BinanceWebSocketClient(
        loop=loop,
        clock=clock,
        logger=LiveLogger(loop=loop, clock=clock),
        handler=print,
        base_url="wss://fstream.binance.com",
    )

    client.subscribe_book_ticker("BTCUSDT-PERP")

    await client.connect(start=True)
    await asyncio.sleep(4)
    await client.close()
async def test_ftx_websocket_client():
    loop = asyncio.get_event_loop()
    clock = LiveClock()

    client = FTXWebSocketClient(
        loop=loop,
        clock=clock,
        logger=LiveLogger(loop=loop, clock=clock),
        msg_handler=print,
        reconnect_handler=print,
        key=os.getenv("FTX_API_KEY"),
        secret=os.getenv("FTX_API_SECRET"),
    )

    await client.connect(start=True)

    # await client.subscribe_markets()
    await client.subscribe_orderbook("ETH-PERP")
    await asyncio.sleep(3)
    await client.disconnect()
    await client.close()
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER-001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()

        self.trader_id = TestIdStubs.trader_id()
        self.venue = BETFAIR_VENUE

        # Setup logging
        self.logger = LiveLogger(loop=self.loop,
                                 clock=self.clock,
                                 level_stdout=LogLevel.DEBUG)
        self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger)

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )
        self.cache = TestComponentStubs.cache()
Example #24
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.venue = BETFAIR_VENUE
        self.account = TestExecStubs.betting_account()
        self.instrument = BetfairTestStubs.betting_instrument()

        # Setup logging
        self.logger = LiveLogger(loop=self.loop,
                                 clock=self.clock,
                                 level_stdout=LogLevel.DEBUG)

        self.msgbus = MessageBus(
            trader_id=TestIdStubs.trader_id(),
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestComponentStubs.cache()
        self.cache.add_instrument(BetfairTestStubs.betting_instrument())
Example #25
    def __init__(
        self,
        strategies: List[TradingStrategy],
        config: Dict[str, object],
    ):
        """
        Initialize a new instance of the TradingNode class.

        Parameters
        ----------
        strategies : list[TradingStrategy]
            The list of strategies to run on the trading node.
        config : dict[str, object]
            The configuration for the trading node.

        Raises
        ------
        ValueError
            If strategies is None or empty.
        ValueError
            If config is None or empty.

        """
        PyCondition.not_none(strategies, "strategies")
        PyCondition.not_none(config, "config")
        PyCondition.not_empty(strategies, "strategies")
        PyCondition.not_empty(config, "config")

        # Extract configs
        config_trader = config.get("trader", {})
        config_log = config.get("logging", {})
        config_exec_db = config.get("exec_database", {})
        config_strategy = config.get("strategy", {})
        config_adapters = config.get("adapters", {})

        self._uuid_factory = UUIDFactory()
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._clock = LiveClock(loop=self._loop)

        self.created_time = self._clock.utc_now()
        self._is_running = False

        # Uncomment for debugging
        # self._loop.set_debug(True)

        # Setup identifiers
        self.trader_id = TraderId(
            name=config_trader["name"],
            tag=config_trader["id_tag"],
        )

        # Setup logging
        self._logger = LiveLogger(
            clock=self._clock,
            name=self.trader_id.value,
            level_console=LogLevelParser.from_str_py(config_log.get("log_level_console")),
            level_file=LogLevelParser.from_str_py(config_log.get("log_level_file")),
            level_store=LogLevelParser.from_str_py(config_log.get("log_level_store")),
            run_in_process=config_log.get("run_in_process", True),  # Run logger in a separate process
            log_thread=config_log.get("log_thread_id", False),
            log_to_file=config_log.get("log_to_file", False),
            log_file_path=config_log.get("log_file_path", ""),
        )

        self._log = LoggerAdapter(component_name=self.__class__.__name__, logger=self._logger)
        self._log_header()
        self._log.info("Building...")

        self._setup_loop()  # Requires the logger to be initialized

        self.portfolio = Portfolio(
            clock=self._clock,
            logger=self._logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self.portfolio.register_cache(self._data_engine.cache)
        self.analyzer = PerformanceAnalyzer()

        if config_exec_db["type"] == "redis":
            exec_db = RedisExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
                command_serializer=MsgPackCommandSerializer(),
                event_serializer=MsgPackEventSerializer(),
                config={
                    "host": config_exec_db["host"],
                    "port": config_exec_db["port"],
                }
            )
        else:
            exec_db = BypassExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
            )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            database=exec_db,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self._exec_engine.load_cache()
        self._setup_adapters(config_adapters, self._logger)

        self.trader = Trader(
            trader_id=self.trader_id,
            strategies=strategies,
            portfolio=self.portfolio,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            clock=self._clock,
            logger=self._logger,
        )

        self._check_residuals_delay = config_trader.get("check_residuals_delay", 5.0)
        self._load_strategy_state = config_strategy.get("load_state", True)
        self._save_strategy_state = config_strategy.get("save_state", True)

        if self._load_strategy_state:
            self.trader.load()

        self._log.info("state=INITIALIZED.")
        self.time_to_initialize = self._clock.delta(self.created_time)
        self._log.info(f"Initialized in {self.time_to_initialize.total_seconds():.3f}s.")
Example #26
class TradingNode:
    """
    Provides an asynchronous network node for live trading.
    """

    def __init__(
        self,
        strategies: List[TradingStrategy],
        config: Dict[str, object],
    ):
        """
        Initialize a new instance of the TradingNode class.

        Parameters
        ----------
        strategies : list[TradingStrategy]
            The list of strategies to run on the trading node.
        config : dict[str, object]
            The configuration for the trading node.

        Raises
        ------
        ValueError
            If strategies is None or empty.
        ValueError
            If config is None or empty.

        """
        PyCondition.not_none(strategies, "strategies")
        PyCondition.not_none(config, "config")
        PyCondition.not_empty(strategies, "strategies")
        PyCondition.not_empty(config, "config")

        # Extract configs
        config_trader = config.get("trader", {})
        config_log = config.get("logging", {})
        config_exec_db = config.get("exec_database", {})
        config_strategy = config.get("strategy", {})
        config_adapters = config.get("adapters", {})

        self._uuid_factory = UUIDFactory()
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._clock = LiveClock(loop=self._loop)

        self.created_time = self._clock.utc_now()
        self._is_running = False

        # Uncomment for debugging
        # self._loop.set_debug(True)

        # Setup identifiers
        self.trader_id = TraderId(
            name=config_trader["name"],
            tag=config_trader["id_tag"],
        )

        # Setup logging
        self._logger = LiveLogger(
            clock=self._clock,
            name=self.trader_id.value,
            level_console=LogLevelParser.from_str_py(config_log.get("log_level_console")),
            level_file=LogLevelParser.from_str_py(config_log.get("log_level_file")),
            level_store=LogLevelParser.from_str_py(config_log.get("log_level_store")),
            run_in_process=config_log.get("run_in_process", True),  # Run logger in a separate process
            log_thread=config_log.get("log_thread_id", False),
            log_to_file=config_log.get("log_to_file", False),
            log_file_path=config_log.get("log_file_path", ""),
        )

        self._log = LoggerAdapter(component_name=self.__class__.__name__, logger=self._logger)
        self._log_header()
        self._log.info("Building...")

        self._setup_loop()  # Requires the logger to be initialized

        self.portfolio = Portfolio(
            clock=self._clock,
            logger=self._logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self.portfolio.register_cache(self._data_engine.cache)
        self.analyzer = PerformanceAnalyzer()

        if config_exec_db["type"] == "redis":
            exec_db = RedisExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
                command_serializer=MsgPackCommandSerializer(),
                event_serializer=MsgPackEventSerializer(),
                config={
                    "host": config_exec_db["host"],
                    "port": config_exec_db["port"],
                }
            )
        else:
            exec_db = BypassExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
            )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            database=exec_db,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self._exec_engine.load_cache()
        self._setup_adapters(config_adapters, self._logger)

        self.trader = Trader(
            trader_id=self.trader_id,
            strategies=strategies,
            portfolio=self.portfolio,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            clock=self._clock,
            logger=self._logger,
        )

        self._check_residuals_delay = config_trader.get("check_residuals_delay", 5.0)
        self._load_strategy_state = config_strategy.get("load_state", True)
        self._save_strategy_state = config_strategy.get("save_state", True)

        if self._load_strategy_state:
            self.trader.load()

        self._log.info("state=INITIALIZED.")
        self.time_to_initialize = self._clock.delta(self.created_time)
        self._log.info(f"Initialized in {self.time_to_initialize.total_seconds():.3f}s.")

    @property
    def is_running(self) -> bool:
        """
        If the trading node is running.

        Returns
        -------
        bool
            True if running, else False.

        """
        return self._is_running

    def get_event_loop(self) -> asyncio.AbstractEventLoop:
        """
        Return the event loop of the trading node.

        Returns
        -------
        asyncio.AbstractEventLoop

        """
        return self._loop

    def get_logger(self) -> LiveLogger:
        """
        Return the logger for the trading node.

        Returns
        -------
        LiveLogger

        """
        return self._logger

    def start(self) -> None:
        """
        Start the trading node.
        """
        try:
            if self._loop.is_running():
                self._loop.create_task(self._run())
            else:
                self._loop.run_until_complete(self._run())

        except RuntimeError as ex:
            self._log.exception(ex)

    def stop(self) -> None:
        """
        Stop the trading node gracefully.

        After a specified delay the internal `Trader` residuals will be checked.

        If strategy state saving is configured, strategy states will then be saved.

        """
        try:
            if self._loop.is_running():
                self._loop.create_task(self._stop())
            else:
                self._loop.run_until_complete(self._stop())
        except RuntimeError as ex:
            self._log.exception(ex)

    def dispose(self) -> None:
        """
        Dispose of the trading node.

        Gracefully shuts down the executor and event loop.

        """
        try:
            timeout = self._clock.utc_now() + timedelta(seconds=5)
            while self._is_running:
                time.sleep(0.1)
                if self._clock.utc_now() >= timeout:
                    self._log.warning("Timed out (5s) waiting for node to stop.")
                    break

            self._log.info("state=DISPOSING...")

            self._log.debug(f"{self._data_engine.get_run_queue_task()}")
            self._log.debug(f"{self._exec_engine.get_run_queue_task()}")

            self.trader.dispose()
            self._data_engine.dispose()
            self._exec_engine.dispose()

            self._log.info("Shutting down executor...")
            if sys.version_info >= (3, 9):
                # cancel_futures added in Python 3.9
                self._executor.shutdown(wait=True, cancel_futures=True)
            else:
                self._executor.shutdown(wait=True)

            self._log.info("Stopping event loop...")
            self._loop.stop()
            self._cancel_all_tasks()
        except RuntimeError as ex:
            self._log.error("Shutdown coro issues will be fixed soon...")  # TODO: Remove when fixed
            self._log.exception(ex)
        finally:
            if self._loop.is_running():
                self._log.warning("Cannot close a running event loop.")
            else:
                self._log.info("Closing event loop...")
                self._loop.close()

            # Check and log if event loop is running
            if self._loop.is_running():
                self._log.warning(f"loop.is_running={self._loop.is_running()}")
            else:
                self._log.info(f"loop.is_running={self._loop.is_running()}")

            # Check and log if event loop is closed
            if not self._loop.is_closed():
                self._log.warning(f"loop.is_closed={self._loop.is_closed()}")
            else:
                self._log.info(f"loop.is_closed={self._loop.is_closed()}")

            self._log.info("state=DISPOSED.")
            self._logger.stop()  # Ensure process is stopped
            time.sleep(0.1)      # Ensure final log messages

    def _log_header(self) -> None:
        nautilus_header(self._log)
        self._log.info(f"redis {redis.__version__}")
        self._log.info(f"msgpack {msgpack.version[0]}.{msgpack.version[1]}.{msgpack.version[2]}")
        if uvloop_version:
            self._log.info(f"uvloop {uvloop_version}")
        self._log.info("=================================================================")

    def _setup_loop(self) -> None:
        if self._loop.is_closed():
            self._log.error("Cannot setup signal handling (event loop was closed).")
            return

        signal.signal(signal.SIGINT, signal.SIG_DFL)
        signals = (signal.SIGTERM, signal.SIGINT, signal.SIGHUP, signal.SIGABRT)
        for sig in signals:
            self._loop.add_signal_handler(sig, self._loop_sig_handler, sig)
        self._log.debug(f"Event loop {signals} handling setup.")

    def _loop_sig_handler(self, sig: signal.Signals) -> None:
        self._loop.remove_signal_handler(signal.SIGTERM)
        self._loop.add_signal_handler(signal.SIGINT, lambda: None)

        self._log.warning(f"Received {sig!s}, shutting down...")
        self.stop()

    def _setup_adapters(self, config: Dict[str, object], logger: LiveLogger) -> None:
        # Set up each configured client adapter
        for name, client_config in config.items():
            if name.startswith("ccxt-"):
                try:
                    import ccxtpro  # TODO: Find a better way of doing this
                except ImportError:
                    raise ImportError("ccxtpro is not installed, "
                                      "installation instructions can be found at https://ccxt.pro")

                client_cls = getattr(ccxtpro, name.partition('-')[2].lower())
                data_client, exec_client = CCXTClientsFactory.create(
                    client_cls=client_cls,
                    config=client_config,
                    data_engine=self._data_engine,
                    exec_engine=self._exec_engine,
                    clock=self._clock,
                    logger=logger,
                )
            elif name == "oanda":
                data_client = OandaDataClientFactory.create(
                    config=client_config,
                    data_engine=self._data_engine,
                    clock=self._clock,
                    logger=logger,
                )
                exec_client = None  # TODO: Implement
            else:
                self._log.error(f"No adapter available for `{name}`.")
                continue

            if data_client is not None:
                self._data_engine.register_client(data_client)

            if exec_client is not None:
                self._exec_engine.register_client(exec_client)

    async def _run(self) -> None:
        try:
            self._log.info("state=STARTING...")
            self._is_running = True

            self._data_engine.start()
            self._exec_engine.start()

            result: bool = await self._await_engines_connected()
            if not result:
                return

            result: bool = await self._exec_engine.resolve_state()
            if not result:
                return

            self.trader.start()

            if self._loop.is_running():
                self._log.info("state=RUNNING.")
            else:
                self._log.warning("Event loop is not running.")

            # Continue to run while engines are running...
            await self._data_engine.get_run_queue_task()
            await self._exec_engine.get_run_queue_task()
        except asyncio.CancelledError as ex:
            self._log.error(str(ex))

    async def _await_engines_connected(self) -> bool:
        self._log.info("Waiting for engines to initialize...")

        # The data engine clients will be set as connected when all
        # instruments are received and updated with the data engine.
        # The execution engine clients will be set as connected when all
        # accounts are updated and the current order and position status is
        # confirmed. Thus any delay here will be due to blocking network IO.
        seconds = 5  # Hard coded for now
        timeout: timedelta = self._clock.utc_now() + timedelta(seconds=seconds)
        while True:
            await asyncio.sleep(0.1)
            if self._clock.utc_now() >= timeout:
                self._log.error(f"Timed out ({seconds}s) waiting for "
                                f"engines to initialize.")
                return False
            if not self._data_engine.check_connected():
                continue
            if not self._exec_engine.check_connected():
                continue
            break

        return True  # Engines initialized

    async def _stop(self) -> None:
        self._is_stopping = True
        self._log.info("state=STOPPING...")

        if self.trader.state == ComponentState.RUNNING:
            self.trader.stop()
            self._log.info(f"Awaiting residual state ({self._check_residuals_delay}s delay)...")
            await asyncio.sleep(self._check_residuals_delay)
            self.trader.check_residuals()

        if self._save_strategy_state:
            self.trader.save()

        if self._data_engine.state == ComponentState.RUNNING:
            self._data_engine.stop()
        if self._exec_engine.state == ComponentState.RUNNING:
            self._exec_engine.stop()

        await self._await_engines_disconnected()

        # Clean up remaining timers
        timer_names = self._clock.timer_names()
        self._clock.cancel_timers()

        for name in timer_names:
            self._log.info(f"Cancelled Timer(name={name}).")

        self._log.info("state=STOPPED.")
        self._is_running = False

    async def _await_engines_disconnected(self) -> None:
        self._log.info("Waiting for engines to disconnect...")

        seconds = 5  # Hard coded for now
        timeout: timedelta = self._clock.utc_now() + timedelta(seconds=seconds)
        while True:
            await asyncio.sleep(0.1)
            if self._clock.utc_now() >= timeout:
                self._log.warning(f"Timed out ({seconds}s) waiting for engines to disconnect.")
                break
            if not self._data_engine.check_disconnected():
                continue
            if not self._exec_engine.check_disconnected():
                continue
            break  # Engines disconnected

    def _cancel_all_tasks(self) -> None:
        to_cancel = asyncio.tasks.all_tasks(self._loop)
        if not to_cancel:
            self._log.info("All tasks finished.")
            return

        for task in to_cancel:
            self._log.warning(f"Cancelling pending task {task}")
            task.cancel()

        if self._loop.is_running():
            self._log.warning("Event loop still running during `cancel_all_tasks`.")
            return

        finish_all_tasks: asyncio.Future = asyncio.tasks.gather(
            *to_cancel,
            loop=self._loop,
            return_exceptions=True,
        )
        self._loop.run_until_complete(finish_all_tasks)

        self._log.debug(f"{finish_all_tasks}")

        for task in to_cancel:
            if task.cancelled():
                continue
            if task.exception() is not None:
                self._loop.call_exception_handler({
                    'message': 'unhandled exception during asyncio.run() shutdown',
                    'exception': task.exception(),
                    'task': task,
                })
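
Given the full class above, a minimal lifecycle sketch might look like the following. It assumes `strategies` and `config` are built as in the previous example, and it simply exercises the public methods shown above rather than describing an official usage pattern.

# Hypothetical lifecycle sketch for the TradingNode class above
node = TradingNode(strategies=strategies, config=config)

try:
    node.start()    # runs until the engines stop (run_until_complete if the loop is not running)
finally:
    node.stop()     # checks Trader residuals after the configured delay, saves state if configured
    node.dispose()  # shuts down the executor, cancels pending tasks, and closes the event loop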
Example #27
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        # Setup logging
        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        # Setup mock CCXT exchange
        with open(TEST_PATH + "markets.json") as response:
            markets = json.load(response)

        with open(TEST_PATH + "currencies.json") as response:
            currencies = json.load(response)

        with open(TEST_PATH + "watch_order_book.json") as response:
            order_book = json.load(response)

        with open(TEST_PATH + "fetch_trades.json") as response:
            fetch_trades = json.load(response)

        with open(TEST_PATH + "watch_trades.json") as response:
            watch_trades = json.load(response)

        self.mock_ccxt = MagicMock()
        self.mock_ccxt.name = "Binance"
        self.mock_ccxt.precisionMode = 2
        self.mock_ccxt.markets = markets
        self.mock_ccxt.currencies = currencies
        self.mock_ccxt.watch_order_book = order_book
        self.mock_ccxt.watch_trades = watch_trades
        self.mock_ccxt.fetch_trades = fetch_trades

        self.client = CCXTDataClient(
            client=self.mock_ccxt,
            engine=self.data_engine,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine.register_client(self.client)
Example #28
    def live_logger(clock):
        return LiveLogger(loop=asyncio.get_event_loop(), clock=clock)
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")
        self.account_id = AccountId("BINANCE", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        # Setup logging
        logger = LiveLogger(
            clock=self.clock,
            name=self.trader_id.value,
            level_console=LogLevel.INFO,
            level_file=LogLevel.DEBUG,
            level_store=LogLevel.WARNING,
            run_in_process=False,
        )

        self.logger = LiveLogger(self.clock)

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        database = BypassExecutionDatabase(trader_id=self.trader_id,
                                           logger=self.logger)
        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            database=database,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        with open(TEST_PATH + "markets.json") as response:
            markets = json.load(response)

        with open(TEST_PATH + "currencies.json") as response:
            currencies = json.load(response)

        with open(TEST_PATH + "fetch_balance.json") as response:
            fetch_balance = json.load(response)

        with open(TEST_PATH + "watch_balance.json") as response:
            watch_balance = json.load(response)

        self.mock_ccxt = MagicMock()
        self.mock_ccxt.name = "Binance"
        self.mock_ccxt.precisionMode = 2
        self.mock_ccxt.has = {
            "fetchBalance": True,
            "watchBalance": True,
            "watchMyTrades": True,
        }
        self.mock_ccxt.markets = markets
        self.mock_ccxt.currencies = currencies
        self.mock_ccxt.fetch_balance = fetch_balance
        self.mock_ccxt.watch_balance = watch_balance

        self.client = BinanceExecutionClient(
            client=self.mock_ccxt,
            account_id=self.account_id,
            engine=self.exec_engine,
            clock=self.clock,
            logger=logger,
        )

        self.exec_engine.register_client(self.client)
Example #30
    def __init__(
        self,
        strategies: List[TradingStrategy],
        config: Dict[str, object],
    ):
        """
        Initialize a new instance of the TradingNode class.

        Parameters
        ----------
        strategies : list[TradingStrategy]
            The list of strategies to run on the trading node.
        config : dict[str, object]
            The configuration for the trading node.

        """
        if strategies is None:
            strategies = []

        config_trader = config.get("trader", {})
        config_log = config.get("logging", {})
        config_exec_db = config.get("exec_database", {})
        config_strategy = config.get("strategy", {})
        config_data_clients = config.get("data_clients", {})
        config_exec_clients = config.get("exec_clients", {})

        self._clock = LiveClock()
        self._uuid_factory = UUIDFactory()
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._loop.set_debug(False)  # TODO: Development
        self._is_running = False

        # Setup identifiers
        self.trader_id = TraderId(
            name=config_trader["name"],
            tag=config_trader["id_tag"],
        )

        # Setup logging
        logger = LiveLogger(
            clock=self._clock,
            name=self.trader_id.value,
            level_console=LogLevelParser.from_str_py(
                config_log.get("log_level_console")),
            level_file=LogLevelParser.from_str_py(
                config_log.get("log_level_file")),
            level_store=LogLevelParser.from_str_py(
                config_log.get("log_level_store")),
            log_thread=config_log.get("log_thread_id", True),
            log_to_file=config_log.get("log_to_file", False),
            log_file_path=config_log.get("log_file_path", ""),
        )

        self._log = LoggerAdapter(component_name=self.__class__.__name__,
                                  logger=logger)
        self._log_header()
        self._log.info("Building...")

        self.portfolio = Portfolio(
            clock=self._clock,
            logger=logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=logger,
        )

        self.portfolio.register_cache(self._data_engine.cache)
        self.analyzer = PerformanceAnalyzer()

        if config_exec_db["type"] == "redis":
            exec_db = RedisExecutionDatabase(
                trader_id=self.trader_id,
                logger=logger,
                command_serializer=MsgPackCommandSerializer(),
                event_serializer=MsgPackEventSerializer(),
                config={
                    "host": config_exec_db["host"],
                    "port": config_exec_db["port"],
                })
        else:
            exec_db = BypassExecutionDatabase(
                trader_id=self.trader_id,
                logger=logger,
            )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            database=exec_db,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=logger,
        )

        self._exec_engine.load_cache()
        self._setup_data_clients(config_data_clients, logger)
        self._setup_exec_clients(config_exec_clients, logger)

        self.trader = Trader(
            trader_id=self.trader_id,
            strategies=strategies,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            clock=self._clock,
            logger=logger,
        )

        self._check_residuals_delay = 2.0  # Hard coded delay (refactor)
        self._load_strategy_state = config_strategy.get("load_state", True)
        self._save_strategy_state = config_strategy.get("save_state", True)

        if self._load_strategy_state:
            self.trader.load()

        self._setup_loop()
        self._log.info("state=INITIALIZED.")