def test_secs_to_nanos(self, value, expected):
    """Verify `secs_to_nanos` converts the parametrized seconds value as expected."""
    # Arrange, Act, Assert
    assert secs_to_nanos(value) == expected
def parse_book_update_ws(
    instrument_id: InstrumentId,
    data: Dict[str, Any],
    ts_init: int,
) -> OrderBookDeltas:
    """
    Parse an FTX websocket order book update message into `OrderBookDeltas`.

    The event timestamp is taken from the message `time` field and the
    `checksum` field is used as the update ID. BUY-side deltas precede
    SELL-side deltas in the result.
    """
    ts_event: int = secs_to_nanos(data["time"])
    update_id: int = data["checksum"]

    deltas: List[OrderBookDelta] = []
    for side, key in ((OrderSide.BUY, "bids"), (OrderSide.SELL, "asks")):
        for entry in data[key]:
            deltas.append(
                parse_book_delta_ws(instrument_id, side, entry, ts_event, ts_init, update_id)
            )

    return OrderBookDeltas(
        instrument_id=instrument_id,
        book_type=BookType.L2_MBP,
        deltas=deltas,
        ts_event=ts_event,
        ts_init=ts_init,
        update_id=update_id,
    )
async def wait_for_order(
    self,
    venue_order_id: VenueOrderId,
    timeout_seconds: float = 10.0,
) -> Optional[ClientOrderId]:
    """
    Return the `ClientOrderId` mapped to the given `venue_order_id`, polling
    for up to `timeout_seconds`.

    We may get an order update from the socket before our submit_order
    response has come back (with our betId). As a precaution, wait up to
    `timeout_seconds` for the betId to be added to
    `self.venue_order_id_to_client_order_id`.

    Returns ``None`` (after logging a warning) if the mapping does not
    appear within the timeout.
    """
    assert isinstance(venue_order_id, VenueOrderId)
    start = self._clock.timestamp_ns()
    now = start
    # Poll every 100ms until the mapping appears or the timeout elapses.
    while (now - start) < secs_to_nanos(timeout_seconds):
        if venue_order_id in self.venue_order_id_to_client_order_id:
            client_order_id = self.venue_order_id_to_client_order_id[venue_order_id]
            self._log.debug(
                f"Found order in {nanos_to_secs(now - start)} sec: {client_order_id}"
            )
            return client_order_id
        now = self._clock.timestamp_ns()
        await asyncio.sleep(0.1)
    self._log.warning(
        f"Failed to find venue_order_id: {venue_order_id} "
        f"after {timeout_seconds} seconds"
        f"\nexisting: {self.venue_order_id_to_client_order_id}"
    )
    return None
def parse_csv_tick(df, instrument_id):
    """
    Generate market data objects from the rows of `df`.

    First yields `instrument`, then one `QuoteTick` per dataframe row.
    Each row is read as (timestamp, bid, ask); both sizes are fixed at
    1,000,000.
    """
    # NOTE(review): `instrument` is not a parameter or local of this function —
    # presumably a module-level global defined elsewhere in this file; confirm.
    yield instrument
    for row in df.values:
        ts_ns = secs_to_nanos(pd.Timestamp(row[0]).timestamp())
        yield QuoteTick(
            instrument_id=instrument_id,
            bid=Price.from_str(str(row[1])),
            ask=Price.from_str(str(row[2])),
            bid_size=Quantity.from_int(1_000_000),
            ask_size=Quantity.from_int(1_000_000),
            ts_event=ts_ns,
            ts_init=ts_ns,
        )
def parse_book_partial_ws(
    instrument_id: InstrumentId,
    data: Dict[str, Any],
    ts_init: int,
) -> OrderBookSnapshot:
    """
    Parse an FTX websocket partial (full book) message into an
    `OrderBookSnapshot`, using the message `time` for the event timestamp
    and `checksum` as the update ID.
    """
    bids = [[level[0], level[1]] for level in data.get("bids")]
    asks = [[level[0], level[1]] for level in data.get("asks")]
    return OrderBookSnapshot(
        instrument_id=instrument_id,
        book_type=BookType.L2_MBP,
        bids=bids,
        asks=asks,
        ts_event=secs_to_nanos(data["time"]),
        ts_init=ts_init,
        update_id=data["checksum"],
    )
def parse_quote_tick_ws(
    instrument: Instrument,
    data: Dict[str, Any],
    ts_init: int,
) -> QuoteTick:
    """
    Parse an FTX websocket ticker message into a `QuoteTick`, applying the
    instrument's price and size precisions.
    """
    price_precision = instrument.price_precision
    size_precision = instrument.size_precision
    return QuoteTick(
        instrument_id=instrument.id,
        bid=Price(data["bid"], price_precision),
        ask=Price(data["ask"], price_precision),
        bid_size=Quantity(data["bidSize"], size_precision),
        ask_size=Quantity(data["askSize"], size_precision),
        ts_event=secs_to_nanos(data["time"]),
        ts_init=ts_init,
    )
def parse_bars_http(
    instrument: Instrument,
    bar_type: BarType,
    data: List[Dict[str, Any]],
    ts_event_delta: int,
    ts_init: int,
) -> List[Bar]:
    """
    Parse FTX historical price rows into a list of `Bar` objects.

    Each row's `time` (seconds) is converted to nanoseconds and shifted by
    `ts_event_delta` for the event timestamp; prices and volume use the
    instrument's precisions.
    """
    price_precision = instrument.price_precision
    size_precision = instrument.size_precision
    return [
        Bar(
            bar_type=bar_type,
            open=Price(row["open"], price_precision),
            high=Price(row["high"], price_precision),
            low=Price(row["low"], price_precision),
            close=Price(row["close"], price_precision),
            volume=Quantity(row["volume"], size_precision),
            check=True,
            ts_event=secs_to_nanos(row["time"]) + ts_event_delta,
            ts_init=ts_init,
        )
        for row in data
    ]
async def _request_bars(  # noqa C901 'FTXDataClient._request_bars' is too complex (11)
    self,
    bar_type: BarType,
    from_datetime: pd.Timestamp,
    to_datetime: pd.Timestamp,
    limit: int,
    correlation_id: UUID4,
):
    """
    Request historical bars for `bar_type` from FTX and hand them to
    `_handle_bars`.

    The bar specification is converted to a resolution in seconds, which
    must be one of FTX's valid windows, or a whole multiple of one day up
    to 30 days. The final (assumed still-forming) bar is passed as the
    `partial` bar. Invalid requests are logged and dropped.
    """
    instrument = self._instrument_provider.find(bar_type.instrument_id)
    if instrument is None:
        self._log.error(
            f"Cannot parse historical bars: "
            f"no instrument found for {bar_type.instrument_id}.",
        )
        return

    # Convert the bar aggregation + step into a window size in seconds
    if bar_type.spec.aggregation == BarAggregation.SECOND:
        resolution = bar_type.spec.step
    elif bar_type.spec.aggregation == BarAggregation.MINUTE:
        resolution = bar_type.spec.step * 60
    elif bar_type.spec.aggregation == BarAggregation.HOUR:
        resolution = bar_type.spec.step * 60 * 60
    elif bar_type.spec.aggregation == BarAggregation.DAY:
        resolution = bar_type.spec.step * 60 * 60 * 24
    else:  # pragma: no cover (design-time error)
        raise RuntimeError(
            f"invalid aggregation type, "
            f"was {BarAggregationParser.to_str_py(bar_type.spec.aggregation)}",
        )

    # Define validation constants
    max_seconds: int = 30 * 86400
    valid_windows: List[int] = [15, 60, 300, 900, 3600, 14400, 86400]

    # Validate resolution
    if resolution > max_seconds:
        self._log.error(
            f"Cannot request bars for {bar_type}: "
            f"seconds window exceeds MAX_SECONDS {max_seconds}.",
        )
        return
    if resolution > 86400:
        # Windows beyond one day are only valid as whole-day multiples.
        # (Previously such multiples fell through to the `valid_windows`
        # check and were wrongly rejected.)
        if resolution % 86400 != 0:
            self._log.error(
                f"Cannot request bars for {bar_type}: "
                f"seconds window exceeds 1 day 86,400 and not a multiple of 1 day.",
            )
            return
    elif resolution not in valid_windows:
        self._log.error(
            f"Cannot request bars for {bar_type}: "
            f"invalid seconds window, use one of {valid_windows}.",
        )
        return

    # Get historical bars data
    data: List[Dict[str, Any]] = await self._http_client.get_historical_prices(
        market=bar_type.instrument_id.symbol.value,
        resolution=resolution,
        start_time=int(from_datetime.timestamp()) if from_datetime is not None else None,
        end_time=int(to_datetime.timestamp()) if to_datetime is not None else None,
    )

    # Keep only the most recent `limit` rows, dropping from the left
    # (single slice instead of a quadratic pop(0) loop).
    if limit and len(data) > limit:
        data = data[-limit:]

    bars: List[Bar] = parse_bars_http(
        instrument=instrument,
        bar_type=bar_type,
        data=data,
        ts_event_delta=secs_to_nanos(resolution),
        ts_init=self._clock.timestamp_ns(),
    )
    if not bars:
        # Guard: popping the partial bar from an empty list would raise
        self._log.warning(f"No bars received for {bar_type}.")
        return
    partial: Bar = bars.pop()
    self._handle_bars(bar_type, bars, partial, correlation_id)