def test_message_qsize_at_max_blocks_on_receive_response(self):
        # Arrange
        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        response = DataResponse(
            venue=Venue("BINANCE"),
            data_type=QuoteTick,
            metadata={},
            data=[],
            correlation_id=self.uuid_factory.generate(),
            response_id=self.uuid_factory.generate(),
            response_timestamp=self.clock.utc_now(),
        )

        # Act
        self.data_engine.receive(response)
        self.data_engine.receive(response)  # Add over max size

        # Assert
        self.assertEqual(1, self.data_engine.message_qsize())
        self.assertEqual(0, self.data_engine.command_count)
Example No. 2
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        database = BypassExecutionDatabase(trader_id=self.trader_id, logger=self.logger)
        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            database=database,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 3
    def test_message_qsize_at_max_blocks_on_receive_response(self):
        # Arrange
        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        response = DataResponse(
            client_id=ClientId("BINANCE"),
            data_type=DataType(QuoteTick),
            data=[],
            correlation_id=self.uuid_factory.generate(),
            response_id=self.uuid_factory.generate(),
            timestamp_ns=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.receive(response)
        self.engine.receive(response)  # Add over max size

        # Assert
        self.assertEqual(1, self.engine.message_qsize())
        self.assertEqual(0, self.engine.command_count)
Example No. 4
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock, level_stdout=LogLevel.DEBUG)

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.client = LiveMarketDataClient(
            client_id=ClientId(BINANCE.value),
            engine=self.engine,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 5
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock)

        self.trader_id = TestStubs.trader_id()

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 6
    async def test_message_qsize_at_max_blocks_on_put_data_command(self):
        # Arrange
        self.msgbus.deregister(endpoint="DataEngine.execute",
                               handler=self.engine.execute)
        self.msgbus.deregister(endpoint="DataEngine.process",
                               handler=self.engine.process)
        self.msgbus.deregister(endpoint="DataEngine.request",
                               handler=self.engine.request)
        self.msgbus.deregister(endpoint="DataEngine.response",
                               handler=self.engine.response)

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            config=LiveDataEngineConfig(qsize=1),
        )

        subscribe = Subscribe(
            client_id=ClientId(BINANCE.value),
            data_type=DataType(QuoteTick),
            command_id=self.uuid_factory.generate(),
            ts_init=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.execute(subscribe)
        self.engine.execute(subscribe)
        await asyncio.sleep(0.1)

        # Assert
        assert self.engine.message_qsize() == 1
        assert self.engine.command_count == 0
Example No. 7
    def test_message_qsize_at_max_blocks_on_put_data_command(self):
        # Arrange
        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        subscribe = Subscribe(
            client_id=ClientId(BINANCE.value),
            data_type=DataType(QuoteTick),
            handler=[].append,
            command_id=self.uuid_factory.generate(),
            timestamp_ns=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.execute(subscribe)
        self.engine.execute(subscribe)

        # Assert
        self.assertEqual(1, self.engine.message_qsize())
        self.assertEqual(0, self.engine.command_count)
Example No. 8
    async def test_data_qsize_at_max_blocks_on_put_data(self):
        # Arrange
        self.msgbus.deregister(endpoint="DataEngine.execute",
                               handler=self.engine.execute)
        self.msgbus.deregister(endpoint="DataEngine.process",
                               handler=self.engine.process)
        self.msgbus.deregister(endpoint="DataEngine.request",
                               handler=self.engine.request)
        self.msgbus.deregister(endpoint="DataEngine.response",
                               handler=self.engine.response)

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            config=LiveDataEngineConfig(qsize=1),
        )

        data = Data(1_000_000_000, 1_000_000_000)

        # Act
        self.engine.process(data)
        self.engine.process(data)  # Add over max size
        await asyncio.sleep(0.1)

        # Assert
        assert self.engine.data_qsize() == 1
        assert self.engine.data_count == 0
Example No. 9
def data_engine(event_loop, clock, live_logger, portfolio):
    return LiveDataEngine(
        loop=event_loop,
        portfolio=portfolio,
        clock=clock,
        logger=live_logger,
    )
Example No. 10
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = TestLogger(self.clock, level_console=LogLevel.DEBUG)

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        self.client = LiveDataClient(
            venue=BINANCE,
            engine=self.engine,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 11
    def test_message_qsize_at_max_blocks_on_send_request(self):
        # Arrange
        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        handler = []
        request = DataRequest(
            venue=Venue("RANDOM"),
            data_type=QuoteTick,
            metadata={
                "Symbol": Symbol("SOMETHING", Venue("RANDOM")),
                "FromDateTime": None,
                "ToDateTime": None,
                "Limit": 1000,
            },
            callback=handler.append,
            request_id=self.uuid_factory.generate(),
            request_timestamp=self.clock.utc_now(),
        )

        # Act
        self.data_engine.send(request)
        self.data_engine.send(request)

        # Assert
        self.assertEqual(1, self.data_engine.message_qsize())
        self.assertEqual(0, self.data_engine.command_count)
Example No. 12
def data_engine(event_loop, msgbus, clock, live_logger):
    return LiveDataEngine(
        loop=event_loop,
        msgbus=msgbus,
        clock=clock,
        logger=live_logger,
    )
Example No. 13
    def test_message_qsize_at_max_blocks_on_put_data_command(self):
        # Arrange
        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1}
        )

        subscribe = Subscribe(
            venue=BINANCE,
            data_type=QuoteTick,
            metadata={},
            handler=[].append,
            command_id=self.uuid_factory.generate(),
            command_timestamp=self.clock.utc_now(),
        )

        # Act
        self.data_engine.execute(subscribe)
        self.data_engine.execute(subscribe)

        # Assert
        self.assertEqual(1, self.data_engine.message_qsize())
        self.assertEqual(0, self.data_engine.command_count)
Example No. 14
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER-001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)
        self.executor = concurrent.futures.ThreadPoolExecutor()
        self.loop.set_default_executor(self.executor)
        self.loop.set_debug(True)

        # Setup logging
        logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
            trader_id=self.trader_id,
            level_stdout=LogLevel.DEBUG,
        )

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.mock_oanda = MagicMock()

        self.client = OandaDataClient(
            client=self.mock_oanda,
            account_id="001",
            engine=self.data_engine,
            clock=self.clock,
            logger=logger,
        )

        self.data_engine.register_client(self.client)

        with open(TEST_PATH + "instruments.json") as response:
            instruments = json.load(response)

        self.mock_oanda.request.return_value = instruments
Example No. 15
    async def test_message_qsize_at_max_blocks_on_send_request(self):
        # Arrange
        self.msgbus.deregister(endpoint="DataEngine.execute",
                               handler=self.engine.execute)
        self.msgbus.deregister(endpoint="DataEngine.process",
                               handler=self.engine.process)
        self.msgbus.deregister(endpoint="DataEngine.request",
                               handler=self.engine.request)
        self.msgbus.deregister(endpoint="DataEngine.response",
                               handler=self.engine.response)

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            config=LiveDataEngineConfig(qsize=1),
        )

        handler = []
        request = DataRequest(
            client_id=ClientId("RANDOM"),
            venue=None,
            data_type=DataType(
                QuoteTick,
                metadata={
                    "instrument_id": InstrumentId(Symbol("SOMETHING"), Venue("RANDOM")),
                    "from_datetime": None,
                    "to_datetime": None,
                    "limit": 1000,
                },
            ),
            callback=handler.append,
            request_id=self.uuid_factory.generate(),
            ts_init=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.request(request)
        self.engine.request(request)
        await asyncio.sleep(0.1)

        # Assert
        assert self.engine.message_qsize() == 1
        assert self.engine.command_count == 0
Example No. 16
    def test_data_qsize_at_max_blocks_on_put_data(self):
        # Arrange
        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        # Act
        self.data_engine.process("some_data")
        self.data_engine.process("some_data")

        # Assert
        self.assertEqual(1, self.data_engine.data_qsize())
        self.assertEqual(0, self.data_engine.data_count)
Example No. 17
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock)

        self.trader_id = TestIdStubs.trader_id()

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestComponentStubs.cache()

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.client = LiveMarketDataClient(
            loop=self.loop,
            client_id=ClientId(BINANCE.value),
            venue=BINANCE,
            instrument_provider=InstrumentProvider(
                venue=Venue("SIM"),
                logger=self.logger,
            ),
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 18
    def test_data_qsize_at_max_blocks_on_put_data(self):
        # Arrange
        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        data = Data(1_000_000_000)

        # Act
        self.engine.process(data)
        self.engine.process(data)  # Add over max size

        # Assert
        self.assertEqual(1, self.engine.data_qsize())
        self.assertEqual(0, self.engine.data_count)
Example No. 19
    def test_message_qsize_at_max_blocks_on_send_request(self):
        # Arrange
        self.engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            config={"qsize": 1},
        )

        handler = []
        request = DataRequest(
            client_id=ClientId("RANDOM"),
            data_type=DataType(
                QuoteTick,
                metadata={
                    "instrument_id": InstrumentId(Symbol("SOMETHING"), Venue("RANDOM")),
                    "from_datetime": None,
                    "to_datetime": None,
                    "limit": 1000,
                },
            ),
            callback=handler.append,
            request_id=self.uuid_factory.generate(),
            timestamp_ns=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.send(request)
        self.engine.send(request)

        # Assert
        self.assertEqual(1, self.engine.message_qsize())
        self.assertEqual(0, self.engine.command_count)
Example No. 20
    async def test_message_qsize_at_max_blocks_on_receive_response(self):
        # Arrange
        self.msgbus.deregister(endpoint="DataEngine.execute",
                               handler=self.engine.execute)
        self.msgbus.deregister(endpoint="DataEngine.process",
                               handler=self.engine.process)
        self.msgbus.deregister(endpoint="DataEngine.request",
                               handler=self.engine.request)
        self.msgbus.deregister(endpoint="DataEngine.response",
                               handler=self.engine.response)

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            config=LiveDataEngineConfig(qsize=1),
        )

        response = DataResponse(
            client_id=ClientId("BINANCE"),
            venue=BINANCE,
            data_type=DataType(QuoteTick),
            data=[],
            correlation_id=self.uuid_factory.generate(),
            response_id=self.uuid_factory.generate(),
            ts_init=self.clock.timestamp_ns(),
        )

        # Act
        self.engine.response(response)
        self.engine.response(response)  # Add over max size
        await asyncio.sleep(0.1)

        # Assert
        assert self.engine.message_qsize() == 1
        assert self.engine.command_count == 0
Example No. 21
    def setup(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER-001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 22
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock)

        self.trader_id = TestIdStubs.trader_id()

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestComponentStubs.cache()

        self.engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.client = LiveDataClient(
            loop=self.loop,
            client_id=ClientId("BLOOMBERG"),
            venue=None,  # Multi-venue
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
Example No. 23
    def __init__(
        self,
        strategies: List[TradingStrategy],
        config: Dict[str, object],
    ):
        """
        Initialize a new instance of the TradingNode class.

        Parameters
        ----------
        strategies : list[TradingStrategy]
            The list of strategies to run on the trading node.
        config : dict[str, object]
            The configuration for the trading node.

        Raises
        ------
        ValueError
            If strategies is None or empty.
        ValueError
            If config is None or empty.

        """
        PyCondition.not_none(strategies, "strategies")
        PyCondition.not_none(config, "config")
        PyCondition.not_empty(strategies, "strategies")
        PyCondition.not_empty(config, "config")

        # Extract configs
        config_trader = config.get("trader", {})
        config_log = config.get("logging", {})
        config_exec_db = config.get("exec_database", {})
        config_strategy = config.get("strategy", {})
        config_adapters = config.get("adapters", {})

        self._uuid_factory = UUIDFactory()
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._clock = LiveClock(loop=self._loop)

        self.created_time = self._clock.utc_now()
        self._is_running = False

        # Uncomment for debugging
        # self._loop.set_debug(True)

        # Setup identifiers
        self.trader_id = TraderId(
            name=config_trader["name"],
            tag=config_trader["id_tag"],
        )

        # Setup logging
        self._logger = LiveLogger(
            clock=self._clock,
            name=self.trader_id.value,
            level_console=LogLevelParser.from_str_py(config_log.get("log_level_console")),
            level_file=LogLevelParser.from_str_py(config_log.get("log_level_file")),
            level_store=LogLevelParser.from_str_py(config_log.get("log_level_store")),
            run_in_process=config_log.get("run_in_process", True),  # Run logger in a separate process
            log_thread=config_log.get("log_thread_id", False),
            log_to_file=config_log.get("log_to_file", False),
            log_file_path=config_log.get("log_file_path", ""),
        )

        self._log = LoggerAdapter(component_name=self.__class__.__name__, logger=self._logger)
        self._log_header()
        self._log.info("Building...")

        self._setup_loop()  # Requires the logger to be initialized

        self.portfolio = Portfolio(
            clock=self._clock,
            logger=self._logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self.portfolio.register_cache(self._data_engine.cache)
        self.analyzer = PerformanceAnalyzer()

        if config_exec_db["type"] == "redis":
            exec_db = RedisExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
                command_serializer=MsgPackCommandSerializer(),
                event_serializer=MsgPackEventSerializer(),
                config={
                    "host": config_exec_db["host"],
                    "port": config_exec_db["port"],
                }
            )
        else:
            exec_db = BypassExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
            )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            database=exec_db,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self._exec_engine.load_cache()
        self._setup_adapters(config_adapters, self._logger)

        self.trader = Trader(
            trader_id=self.trader_id,
            strategies=strategies,
            portfolio=self.portfolio,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            clock=self._clock,
            logger=self._logger,
        )

        self._check_residuals_delay = config_trader.get("check_residuals_delay", 5.0)
        self._load_strategy_state = config_strategy.get("load_state", True)
        self._save_strategy_state = config_strategy.get("save_state", True)

        if self._load_strategy_state:
            self.trader.load()

        self._log.info("state=INITIALIZED.")
        self.time_to_initialize = self._clock.delta(self.created_time)
        self._log.info(f"Initialized in {self.time_to_initialize.total_seconds():.3f}s.")
Example No. 24
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock)

        self.trader_id = TestStubs.trader_id()

        self.order_factory = OrderFactory(
            trader_id=self.trader_id,
            strategy_id=StrategyId("S-001"),
            clock=self.clock,
        )

        self.random_order_factory = OrderFactory(
            trader_id=TraderId("RANDOM-042"),
            strategy_id=StrategyId("S-042"),
            clock=self.clock,
        )

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.risk_engine = LiveRiskEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.instrument_provider = InstrumentProvider()
        self.instrument_provider.add(AUDUSD_SIM)
        self.instrument_provider.add(GBPUSD_SIM)

        self.client = MockLiveExecutionClient(
            loop=self.loop,
            client_id=ClientId(SIM.value),
            venue_type=VenueType.ECN,
            account_id=TestStubs.account_id(),
            account_type=AccountType.CASH,
            base_currency=USD,
            instrument_provider=self.instrument_provider,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
        self.portfolio.update_account(TestStubs.event_cash_account_state())
        self.exec_engine.register_client(self.client)

        self.cache.add_instrument(AUDUSD_SIM)
Example No. 25
    def setUp(self):
        # Fixture Setup
        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.trader_id = TraderId("TESTER", "001")

        # Fresh isolated loop testing pattern
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(self.loop)

        # Setup logging
        self.logger = LiveLogger(
            loop=self.loop,
            clock=self.clock,
        )

        self.portfolio = Portfolio(
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            clock=self.clock,
            logger=self.logger,
        )

        # Setup mock CCXT exchange
        with open(TEST_PATH + "markets.json") as response:
            markets = json.load(response)

        with open(TEST_PATH + "currencies.json") as response:
            currencies = json.load(response)

        with open(TEST_PATH + "watch_order_book.json") as response:
            order_book = json.load(response)

        with open(TEST_PATH + "fetch_trades.json") as response:
            fetch_trades = json.load(response)

        with open(TEST_PATH + "watch_trades.json") as response:
            watch_trades = json.load(response)

        self.mock_ccxt = MagicMock()
        self.mock_ccxt.name = "Binance"
        self.mock_ccxt.precisionMode = 2
        self.mock_ccxt.markets = markets
        self.mock_ccxt.currencies = currencies
        self.mock_ccxt.watch_order_book = order_book
        self.mock_ccxt.watch_trades = watch_trades
        self.mock_ccxt.fetch_trades = fetch_trades

        self.client = CCXTDataClient(
            client=self.mock_ccxt,
            engine=self.data_engine,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine.register_client(self.client)
Example No. 26
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock)

        self.trader_id = TestStubs.trader_id()
        self.account_id = TestStubs.account_id()

        self.order_factory = OrderFactory(
            trader_id=self.trader_id,
            strategy_id=StrategyId("S-001"),
            clock=self.clock,
        )

        self.random_order_factory = OrderFactory(
            trader_id=TraderId("RANDOM-042"),
            strategy_id=StrategyId("S-042"),
            clock=self.clock,
        )

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestStubs.cache()

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.risk_engine = LiveRiskEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_client = MockExecutionClient(
            client_id=ClientId("SIM"),
            venue_type=VenueType.ECN,
            account_id=TestStubs.account_id(),
            account_type=AccountType.MARGIN,
            base_currency=USD,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        # Wire up components
        self.exec_engine.register_client(self.exec_client)
Example No. 27
    def __init__(
        self,
        strategies: List[TradingStrategy],
        config: Dict[str, object],
    ):
        """
        Initialize a new instance of the TradingNode class.

        Parameters
        ----------
        strategies : list[TradingStrategy]
            The list of strategies to run on the trading node.
        config : dict[str, object]
            The configuration for the trading node.

        Raises
        ------
        ValueError
            If strategies is None or empty.
        ValueError
            If config is None or empty.

        """
        PyCondition.not_none(strategies, "strategies")
        PyCondition.not_none(config, "config")
        PyCondition.not_empty(strategies, "strategies")
        PyCondition.not_empty(config, "config")

        self._config = config

        # Extract configs
        config_trader = config.get("trader", {})
        config_system = config.get("system", {})
        config_log = config.get("logging", {})
        config_exec_db = config.get("exec_database", {})
        config_risk = config.get("risk", {})
        config_strategy = config.get("strategy", {})

        # System config
        self._connection_timeout = config_system.get("connection_timeout", 5.0)
        self._disconnection_timeout = config_system.get(
            "disconnection_timeout", 5.0)
        self._check_residuals_delay = config_system.get(
            "check_residuals_delay", 5.0)
        self._load_strategy_state = config_strategy.get("load_state", True)
        self._save_strategy_state = config_strategy.get("save_state", True)

        # Setup loop
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._loop.set_debug(config_system.get("loop_debug", False))

        # Components
        self._clock = LiveClock(loop=self._loop)
        self._uuid_factory = UUIDFactory()
        self.system_id = self._uuid_factory.generate()
        self.created_time = self._clock.utc_now()
        self._is_running = False

        # Setup identifiers
        self.trader_id = TraderId(
            name=config_trader["name"],
            tag=config_trader["id_tag"],
        )

        # Setup logging
        level_stdout = LogLevelParser.from_str_py(
            config_log.get("level_stdout"))

        self._logger = LiveLogger(
            loop=self._loop,
            clock=self._clock,
            trader_id=self.trader_id,
            system_id=self.system_id,
            level_stdout=level_stdout,
        )

        self._log = LoggerAdapter(
            component=self.__class__.__name__,
            logger=self._logger,
        )

        self._log_header()
        self._log.info("Building...")

        if platform.system() != "Windows":
            # Requires the logger to be initialized
            # Windows does not support signal handling
            # https://stackoverflow.com/questions/45987985/asyncio-loops-add-signal-handler-in-windows
            self._setup_loop()

        # Build platform
        # ----------------------------------------------------------------------
        self.portfolio = Portfolio(
            clock=self._clock,
            logger=self._logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self.portfolio.register_cache(self._data_engine.cache)
        self.analyzer = PerformanceAnalyzer()

        if config_exec_db["type"] == "redis":
            exec_db = RedisExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
                command_serializer=MsgPackCommandSerializer(),
                event_serializer=MsgPackEventSerializer(),
                config={
                    "host": config_exec_db["host"],
                    "port": config_exec_db["port"],
                },
            )
        else:
            exec_db = BypassExecutionDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
            )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            database=exec_db,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config={"qsize": 10000},
        )

        self._risk_engine = LiveRiskEngine(
            loop=self._loop,
            exec_engine=self._exec_engine,
            portfolio=self.portfolio,
            clock=self._clock,
            logger=self._logger,
            config=config_risk,
        )

        self._exec_engine.load_cache()
        self._exec_engine.register_risk_engine(self._risk_engine)

        self.trader = Trader(
            trader_id=self.trader_id,
            strategies=strategies,
            portfolio=self.portfolio,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            risk_engine=self._risk_engine,
            clock=self._clock,
            logger=self._logger,
        )

        if self._load_strategy_state:
            self.trader.load()

        self._builder = TradingNodeBuilder(
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            risk_engine=self._risk_engine,
            clock=self._clock,
            logger=self._logger,
            log=self._log,
        )

        self._log.info("state=INITIALIZED.")
        self.time_to_initialize = self._clock.delta(self.created_time)
        self._log.info(
            f"Initialized in {self.time_to_initialize.total_seconds():.3f}s.")

        self._is_built = False
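Compared with the earlier dict-based constructor, this variant additionally reads `system` and `risk` sections. Below is a hedged sketch of a config dict covering them; the values simply mirror the defaults visible in the code above and are placeholders.

# Hypothetical config sketch for the constructor above; all values are assumptions
# mirroring the defaults read in the code (timeouts, residuals delay, etc.).
config = {
    "trader": {"name": "TESTER", "id_tag": "001"},
    "system": {
        "loop_debug": False,             # passed to loop.set_debug(...)
        "connection_timeout": 5.0,
        "disconnection_timeout": 5.0,
        "check_residuals_delay": 5.0,
    },
    "logging": {"level_stdout": "INF"},  # parsed by LogLevelParser; accepted strings vary by version
    "exec_database": {"type": "in-memory"},  # "redis" would also require "host" and "port"
    "risk": {},                          # forwarded to LiveRiskEngine as its config
    "strategy": {"load_state": False, "save_state": False},
}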
Example No. 28
    def __init__(self, config: Optional[TradingNodeConfig] = None):
        if config is None:
            config = TradingNodeConfig()
        PyCondition.not_none(config, "config")
        PyCondition.type(config, TradingNodeConfig, "config")

        # Configuration
        self._config = config

        # Setup loop
        self._loop = asyncio.get_event_loop()
        self._executor = concurrent.futures.ThreadPoolExecutor()
        self._loop.set_default_executor(self._executor)
        self._loop.set_debug(config.loop_debug)

        # Components
        self._clock = LiveClock(loop=self._loop)
        self._uuid_factory = UUIDFactory()
        self.created_time = self._clock.utc_now()
        self._is_running = False

        # Identifiers
        self.trader_id = TraderId(config.trader_id)
        self.machine_id = socket.gethostname()
        self.instance_id = self._uuid_factory.generate()

        # Setup logging
        self._logger = LiveLogger(
            loop=self._loop,
            clock=self._clock,
            trader_id=self.trader_id,
            machine_id=self.machine_id,
            instance_id=self.instance_id,
            level_stdout=LogLevelParser.from_str_py(config.log_level.upper()),
        )

        self._log = LoggerAdapter(
            component_name=type(self).__name__,
            logger=self._logger,
        )

        self._log_header()
        self._log.info("Building...")

        if platform.system() != "Windows":
            # Windows does not support signal handling
            # https://stackoverflow.com/questions/45987985/asyncio-loops-add-signal-handler-in-windows
            self._setup_loop()

        ########################################################################
        # Build platform
        ########################################################################
        if config.cache_database is None or config.cache_database.type == "in-memory":
            cache_db = None
        elif config.cache_database.type == "redis":
            cache_db = RedisCacheDatabase(
                trader_id=self.trader_id,
                logger=self._logger,
                serializer=MsgPackSerializer(timestamps_as_str=True),
                config=config.cache_database,
            )
        else:  # pragma: no cover (design-time error)
            raise ValueError(
                "The cache_db_type in the configuration is unrecognized, "
                "can one of {{'in-memory', 'redis'}}.",
            )

        self._msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self._clock,
            logger=self._logger,
        )

        self._cache = Cache(
            database=cache_db,
            logger=self._logger,
            config=config.cache,
        )

        self.portfolio = Portfolio(
            msgbus=self._msgbus,
            cache=self._cache,
            clock=self._clock,
            logger=self._logger,
        )

        self._data_engine = LiveDataEngine(
            loop=self._loop,
            msgbus=self._msgbus,
            cache=self._cache,
            clock=self._clock,
            logger=self._logger,
            config=config.data_engine,
        )

        self._exec_engine = LiveExecutionEngine(
            loop=self._loop,
            msgbus=self._msgbus,
            cache=self._cache,
            clock=self._clock,
            logger=self._logger,
            config=config.exec_engine,
        )
        self._exec_engine.load_cache()

        self._risk_engine = LiveRiskEngine(
            loop=self._loop,
            portfolio=self.portfolio,
            msgbus=self._msgbus,
            cache=self._cache,
            clock=self._clock,
            logger=self._logger,
            config=config.risk_engine,
        )

        self.trader = Trader(
            trader_id=self.trader_id,
            msgbus=self._msgbus,
            cache=self._cache,
            portfolio=self.portfolio,
            data_engine=self._data_engine,
            risk_engine=self._risk_engine,
            exec_engine=self._exec_engine,
            clock=self._clock,
            logger=self._logger,
        )

        if config.load_strategy_state:
            self.trader.load()

        # Setup persistence (requires trader)
        self.persistence_writers: List[Any] = []
        if config.persistence:
            self._setup_persistence(config=config.persistence)

        self._builder = TradingNodeBuilder(
            loop=self._loop,
            data_engine=self._data_engine,
            exec_engine=self._exec_engine,
            msgbus=self._msgbus,
            cache=self._cache,
            clock=self._clock,
            logger=self._logger,
            log=self._log,
        )

        self._log.info("INITIALIZED.")
        self.time_to_initialize = self._clock.delta(self.created_time)
        self._log.info(f"Initialized in {int(self.time_to_initialize.total_seconds() * 1000)}ms.")

        self._is_built = False
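The most recent variant above replaces the raw config dict with a typed `TradingNodeConfig`. A minimal, hedged construction sketch follows; the import paths are assumptions, and only fields that the constructor above actually reads are set.

# Hypothetical sketch; import paths are assumptions and may differ by version.
from nautilus_trader.config import TradingNodeConfig  # assumed import path
from nautilus_trader.live.node import TradingNode     # assumed import path

config = TradingNodeConfig(
    trader_id="TESTER-001",  # becomes TraderId(config.trader_id)
    log_level="INFO",        # upper-cased and parsed via LogLevelParser.from_str_py
    loop_debug=False,        # passed to loop.set_debug(...)
    cache_database=None,     # None (or an "in-memory" type) skips the Redis cache database
)

node = TradingNode(config=config)  # passing config=None falls back to TradingNodeConfig()
# Registering adapter factories and building/running the node are not shown here.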
Example No. 29
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()

        self.trader_id = TestStubs.trader_id()
        self.uuid = UUID4()
        self.venue = BETFAIR_VENUE
        self.account_id = AccountId(self.venue.value, "001")

        # Setup logging
        self.logger = LiveLogger(loop=self.loop,
                                 clock=self.clock,
                                 level_stdout=LogLevel.ERROR)
        self._log = LoggerAdapter("TestBetfairExecutionClient", self.logger)

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestStubs.cache()
        self.cache.add_instrument(BetfairTestStubs.betting_instrument())

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.betfair_client = BetfairTestStubs.betfair_client(
            loop=self.loop, logger=self.logger)

        self.instrument_provider = BetfairTestStubs.instrument_provider(
            betfair_client=self.betfair_client)
        # Add a subset of instruments
        instruments = [
            ins for ins in INSTRUMENTS
            if ins.market_id in BetfairDataProvider.market_ids()
        ]
        self.instrument_provider.add_bulk(instruments)

        self.client = BetfairDataClient(
            loop=self.loop,
            client=self.betfair_client,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
            instrument_provider=self.instrument_provider,
            market_filter={},
        )

        self.data_engine.register_client(self.client)

        # Re-route exec engine messages through `handler`
        self.messages = []

        def handler(x, endpoint):
            self.messages.append(x)
            if endpoint == "execute":
                self.data_engine.execute(x)
            elif endpoint == "process":
                self.data_engine.process(x)
            elif endpoint == "response":
                self.data_engine.response(x)

        self.msgbus.deregister(
            endpoint="DataEngine.execute",
            handler=self.data_engine.execute)  # type: ignore
        self.msgbus.register(
            endpoint="DataEngine.execute",
            handler=partial(handler, endpoint="execute")  # type: ignore
        )

        self.msgbus.deregister(
            endpoint="DataEngine.process",
            handler=self.data_engine.process)  # type: ignore
        self.msgbus.register(
            endpoint="DataEngine.process",
            handler=partial(handler, endpoint="process")  # type: ignore
        )

        self.msgbus.deregister(
            endpoint="DataEngine.response",
            handler=self.data_engine.response)  # type: ignore
        self.msgbus.register(
            endpoint="DataEngine.response",
            handler=partial(handler, endpoint="response")  # type: ignore
        )
Example No. 30
    def setup(self):
        # Fixture Setup
        self.loop = asyncio.get_event_loop()
        self.loop.set_debug(True)

        self.clock = LiveClock()
        self.uuid_factory = UUIDFactory()
        self.logger = Logger(self.clock, bypass=True)

        self.trader_id = TestIdStubs.trader_id()
        self.account_id = AccountId(BINANCE.value, "001")

        self.msgbus = MessageBus(
            trader_id=self.trader_id,
            clock=self.clock,
            logger=self.logger,
        )

        self.cache = TestComponentStubs.cache()

        self.portfolio = Portfolio(
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.data_engine = LiveDataEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_engine = LiveExecutionEngine(
            loop=self.loop,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.risk_engine = LiveRiskEngine(
            loop=self.loop,
            portfolio=self.portfolio,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )

        self.exec_client = MockExecutionClient(
            client_id=ClientId("BINANCE"),
            venue=BINANCE,
            account_type=AccountType.CASH,
            base_currency=None,  # Multi-currency account
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )
        self.portfolio.update_account(TestEventStubs.margin_account_state())
        self.exec_engine.register_client(self.exec_client)

        self.strategy = TradingStrategy()
        self.strategy.register(
            trader_id=self.trader_id,
            portfolio=self.portfolio,
            msgbus=self.msgbus,
            cache=self.cache,
            clock=self.clock,
            logger=self.logger,
        )