def normalize(ticker: Ticker, calc_price: bool = False) -> dict:
    """Serialize a ``Ticker`` quote for transport.

    Rewrites ``ticker.ticks`` in place: each named-tuple tick becomes a
    plain dict (usable keys for downstream consumers), and ticks whose
    ``tickType`` is 48 or 77 are tagged ``'type': 'trade'``.  The whole
    ticker is then dumped via ``asdict`` and stamped for latency
    measurement.

    NOTE(review): this definition is immediately shadowed by the
    re-definition of ``normalize`` that follows it in this file.
    """
    # named tuples -> dicts so we send usable keys downstream
    converted = []
    for raw_tick in ticker.ticks:
        entry = raw_tick._asdict()
        if entry['tickType'] in (48, 77):
            entry['type'] = 'trade'
        converted.append(entry)
    ticker.ticks = converted

    # some contracts don't have volume so we may want to calculate
    # a midpoint price based on data we can acquire (such as bid / ask)
    if calc_price:
        ticker.ticks.append(
            {'type': 'trade', 'price': ticker.marketPrice()}
        )

    # serialize for transport
    data = asdict(ticker)

    # add time stamps for downstream latency measurements
    data['brokerd_ts'] = time.time()
    if ticker.rtTime:
        # rtTime is presumably epoch milliseconds — convert to seconds
        data['broker_ts'] = data['rtTime_s'] = float(ticker.rtTime) / 1000.

    return data
def normalize(ticker: Ticker, calc_price: bool = False) -> dict:
    """Serialize a ``Ticker`` quote for transport.

    Rewrites ``ticker.ticks`` in place: each named-tuple tick becomes a
    plain dict with a human readable ``'type'`` key looked up from the
    module-level ``tick_types`` table (``'n/a'`` when unmapped).  The
    ticker is then dumped via ``asdict``, stamped with a local
    ``brokerd_ts`` wall-clock time, and stripped of the raw ``rtTime``
    field.

    Parameters
    ----------
    ticker:
        Quote object to normalize; its ``.ticks`` list is mutated.
    calc_price:
        When true, append a synthetic ``'trade'`` tick priced via
        ``ticker.marketPrice()`` — used for contracts that report no
        volume, so a midpoint can be derived from bid/ask.

    Returns
    -------
    dict
        The serialized quote.
    """
    # convert named tuples to dicts so we send usable keys;
    # tag every tick with a readable type from the lookup table
    ticker.ticks = [
        {**tick._asdict(), 'type': tick_types.get(tick.tickType, 'n/a')}
        for tick in ticker.ticks
    ]

    # some contracts don't have volume so we may want to calculate
    # a midpoint price based on data we can acquire (such as bid / ask)
    if calc_price:
        ticker.ticks.append({'type': 'trade', 'price': ticker.marketPrice()})

    # serialize for transport
    data = asdict(ticker)

    # add time stamps for downstream latency measurements
    data['brokerd_ts'] = time.time()

    # TODO: restore a broker-side timestamp derived from ``rtTime``
    # once a proper latency study justifies it; until then drop the
    # raw field.  Default on ``pop`` avoids a KeyError if the field
    # is ever absent from the serialized ticker.
    data.pop('rtTime', None)

    return data