def test_add_generic_data_adds_to_container(self):
    """Feeding the engine two batches of generic MyData should not raise."""
    # Arrange
    backtest_engine = BacktestEngine()
    news_type = DataType(MyData, metadata={"news_wire": "hacks"})
    first_batch = [
        GenericData(news_type, MyData("AAPL hacked")),
        GenericData(news_type, MyData("AMZN hacked", 1000, 1000)),
        GenericData(news_type, MyData("NFLX hacked", 3000, 3000)),
        GenericData(news_type, MyData("MSFT hacked", 2000, 2000)),
    ]
    second_batch = [
        GenericData(news_type, MyData("FB hacked", 1500, 1500)),
    ]

    # Act
    backtest_engine.add_generic_data(ClientId("NEWS_CLIENT"), first_batch)
    backtest_engine.add_generic_data(ClientId("NEWS_CLIENT"), second_batch)

    # Assert: reaching this point without an exception is the pass condition
def test_with_mix_of_stream_data_produces_correct_stream_of_data(self):
    """Order book snapshots and generic data stream out merged in timestamp order."""
    # Arrange
    container = BacktestDataContainer()
    container.add_instrument(ETHUSDT_BINANCE)
    first_snapshot = OrderBookSnapshot(
        instrument_id=ETHUSDT_BINANCE.id,
        level=OrderBookLevel.L2,
        bids=[[1550.15, 0.51], [1580.00, 1.20]],
        asks=[[1552.15, 1.51], [1582.00, 2.20]],
        timestamp_ns=0,
    )
    news_type = DataType(str, metadata={"news_wire": "hacks"})
    news_items = [
        GenericData(news_type, data="AAPL hacked", timestamp_ns=0),
        GenericData(news_type, data="AMZN hacked", timestamp_ns=500_000),
        GenericData(news_type, data="NFLX hacked", timestamp_ns=1_000_000),
        GenericData(news_type, data="MSFT hacked", timestamp_ns=2_000_000),
    ]
    second_snapshot = OrderBookSnapshot(
        instrument_id=ETHUSDT_BINANCE.id,
        level=OrderBookLevel.L2,
        bids=[[1551.15, 0.51], [1581.00, 1.20]],
        asks=[[1553.15, 1.51], [1583.00, 2.20]],
        timestamp_ns=1_000_000,
    )
    container.add_generic_data(ClientId("NEWS_CLIENT"), news_items)
    container.add_order_book_data([first_snapshot, second_snapshot])
    producer = BacktestDataProducer(data=container, logger=self.logger)
    producer.setup(producer.min_timestamp_ns, producer.max_timestamp_ns)

    # Act: drain the producer completely
    received = []
    while producer.has_data:
        received.append(producer.next())

    # Assert: items arrive interleaved by timestamp
    assert [item.timestamp_ns for item in received] == [
        0, 0, 500_000, 1_000_000, 1_000_000, 2_000_000,
    ]
    assert producer.min_timestamp_ns == 0
    assert producer.max_timestamp_ns == 2_000_000
    assert producer.min_timestamp == pd.Timestamp(
        "1970-01-01 00:00:00.000000+0000", tz="UTC"
    )
    assert producer.max_timestamp == pd.Timestamp(
        "1970-01-01 00:00:00.002000+0000", tz="UTC"
    )
def test_handle_data_sends_to_data_engine(self):
    """A data object passed to the client handler is counted by the data engine."""
    # Arrange
    news_type = DataType(str, {"Type": "NEWS_WIRE"})
    generic = GenericData(news_type, "Some news headline", UNIX_EPOCH)

    # Act
    self.client._handle_data_py(generic)

    # Assert
    self.assertEqual(1, self.data_engine.data_count)
def test_data_instantiation(self):
    """GenericData retains the type, payload, and timestamp it was built with."""
    # Arrange, Act
    news_type = DataType(str, {"type": "NEWS_WIRE"})
    generic = GenericData(news_type, "Some News Headline", UNIX_EPOCH)

    # Assert
    self.assertEqual(news_type, generic.data_type)
    self.assertEqual("Some News Headline", generic.data)
    self.assertEqual(UNIX_EPOCH, generic.timestamp)
def test_add_generic_data_adds_to_container(self):
    """Feeding the engine two batches of generic str data should not raise."""
    # Arrange
    backtest_engine = BacktestEngine()
    news_type = DataType(str, metadata={"news_wire": "hacks"})
    first_batch = [
        GenericData(news_type, data=headline, timestamp_origin_ns=ts, timestamp_ns=ts)
        for headline, ts in [
            ("AAPL hacked", 0),
            ("AMZN hacked", 1000),
            ("NFLX hacked", 3000),
            ("MSFT hacked", 2000),
        ]
    ]
    second_batch = [
        GenericData(
            news_type, data="FB hacked", timestamp_origin_ns=1500, timestamp_ns=1500
        ),
    ]

    # Act
    backtest_engine.add_generic_data(ClientId("NEWS_CLIENT"), first_batch)
    backtest_engine.add_generic_data(ClientId("NEWS_CLIENT"), second_batch)

    # Assert: reaching this point without an exception is the pass condition
def test_handle_data_response_sends_to_data_engine(self):
    """A data response forwarded by the client is counted by the data engine."""
    # Arrange
    econ_type = DataType(
        str,
        {"Type": "ECONOMIC_DATA", "topic": "unemployment"},
    )
    payload = GenericData(econ_type, "may 2020, 6.9%", UNIX_EPOCH)

    # Act
    self.client._handle_data_response_py(payload, self.uuid_factory.generate())

    # Assert
    self.assertEqual(1, self.data_engine.response_count)
def test_add_generic_data_adds_to_container(self):
    """Generic data from two add calls ends up merged and time-sorted."""
    # Arrange
    container = BacktestDataContainer()
    news_type = DataType(str, metadata={"news_wire": "hacks"})
    first_batch = [
        GenericData(news_type, data=headline, timestamp_ns=ts)
        for headline, ts in [
            ("AAPL hacked", 0),
            ("AMZN hacked", 1000),
            ("NFLX hacked", 3000),
            ("MSFT hacked", 2000),
        ]
    ]
    second_batch = [GenericData(news_type, data="FB hacked", timestamp_ns=1500)]

    # Act
    container.add_generic_data(ClientId("NEWS_CLIENT"), first_batch)
    container.add_generic_data(ClientId("NEWS_CLIENT"), second_batch)

    # Assert
    assert ClientId("NEWS_CLIENT") in container.clients
    assert len(container.generic_data) == 5
    # Entries are kept sorted by timestamp, so the latest (3000) is last.
    assert container.generic_data[-1].timestamp_ns == 3000
def test_data_instantiation(self):
    """GenericData exposes the data type and the NewsEvent payload it wraps."""
    # Arrange, Act
    news_type = DataType(NewsEvent, {"publisher": "NEWS_WIRE"})
    event = NewsEvent(
        impact=NewsImpact.HIGH,
        name="Unemployment Rate",
        currency=USD,
        ts_event_ns=0,
        ts_recv_ns=0,
    )
    wrapped = GenericData(news_type, event)

    # Assert
    self.assertEqual(news_type, wrapped.data_type)
    self.assertEqual(event, wrapped.data)
def test_handle_data_sends_to_data_engine(self):
    """A wrapped NewsEvent handled by the client is counted by the data engine."""
    # Arrange
    news_type = DataType(NewsEvent, {"Type": "NEWS_WIRE"})
    event = NewsEvent(
        impact=NewsImpact.HIGH,
        name="Unemployment Rate",
        currency=USD,
        ts_event_ns=0,
        ts_recv_ns=0,
    )
    wrapped = GenericData(news_type, event)

    # Act
    self.client._handle_data_py(wrapped)

    # Assert
    self.assertEqual(1, self.data_engine.data_count)
def test_with_mix_of_stream_data_produces_correct_stream_of_data(self):
    """Order book snapshots and generic data stream out merged in timestamp order."""
    # Arrange
    first_snapshot = OrderBookSnapshot(
        instrument_id=ETHUSDT_BINANCE.id,
        level=BookLevel.L2,
        bids=[[1550.15, 0.51], [1580.00, 1.20]],
        asks=[[1552.15, 1.51], [1582.00, 2.20]],
        ts_event_ns=0,
        ts_recv_ns=0,
    )
    news_type = DataType(MyData, metadata={"news_wire": "hacks"})
    news_items = [
        GenericData(news_type, data=MyData("AAPL hacked")),
        GenericData(news_type, data=MyData("AMZN hacked", 500_000, 500_000)),
        GenericData(news_type, data=MyData("NFLX hacked", 1_000_000, 1_000_000)),
        GenericData(news_type, data=MyData("MSFT hacked", 2_000_000, 2_000_000)),
    ]
    second_snapshot = OrderBookSnapshot(
        instrument_id=ETHUSDT_BINANCE.id,
        level=BookLevel.L2,
        bids=[[1551.15, 0.51], [1581.00, 1.20]],
        asks=[[1553.15, 1.51], [1583.00, 2.20]],
        ts_event_ns=1_000_000,
        ts_recv_ns=1_000_000,
    )
    producer = BacktestDataProducer(
        logger=self.logger,
        instruments=[ETHUSDT_BINANCE],
        generic_data=news_items,
        order_book_data=[first_snapshot, second_snapshot],
    )
    producer.setup(producer.min_timestamp_ns, producer.max_timestamp_ns)

    # Act: drain the producer completely
    received = []
    while producer.has_data:
        received.append(producer.next())

    # Assert: items arrive interleaved by receive timestamp
    assert [item.ts_recv_ns for item in received] == [
        0, 0, 500_000, 1_000_000, 1_000_000, 2_000_000,
    ]
    assert producer.min_timestamp_ns == 0
    assert producer.max_timestamp_ns == 2_000_000
    assert producer.min_timestamp == pd.Timestamp(
        "1970-01-01 00:00:00.000000+0000", tz="UTC"
    )
    assert producer.max_timestamp == pd.Timestamp(
        "1970-01-01 00:00:00.002000+0000", tz="UTC"
    )