async def subscribe_to_ws(self, ping_interval: int = 10, ping_timeout: int = 30):
    """Open the private websocket connection and subscribe to order feeds.

    Connects to ``self._ws_uri``, fetches an auth token through the REST
    API, sends one subscribe message per feed in ("addOrder",
    "cancelOrder"), then wires up the websocket of every execution model.

    Args:
        ping_interval: seconds between keep-alive pings (forwarded to websockets.connect).
        ping_timeout: seconds to wait for a pong before closing (forwarded to websockets.connect).
    """
    feeds = ["addOrder", "cancelOrder"]
    self.ws = await websockets.connect(uri=self._ws_uri,
                                       ping_interval=ping_interval,
                                       ping_timeout=ping_timeout)
    # token required by the exchange to authenticate private subscriptions
    self.ws_token = await self.api.get_websocket_auth_token()
    for feed in feeds:
        try:
            data = {
                "event": "subscribe",
                "subscription": {
                    "name": feed,
                    "token": self.ws_token.value['token']
                }
            }
            payload = ujson.dumps(data)
            await self.ws.send(payload)
            # small pause so we don't flood the server with subscribe requests
            await asyncio.sleep(0.1)
        except Exception as e:
            log_exception(logger, e)
    for _key, model in self.execution_models.items():
        await self.setup_execution_ws(model)
def calculate_indicator(self, func, source, **kwargs):
    """Compute an indicator and store the result column(s) in ``self.df``.

    Args:
        func: TA-Lib style function (any callable taking a series + kwargs).
        source: column of ``self.df`` to use as input.
        **kwargs: keyword arguments forwarded to ``func``.

    Notes:
        Adds the result to the instance dataframe in place. A function
        returning a tuple (e.g. talib.MAMA) yields one column per element,
        named ``<func_name>0``, ``<func_name>1``, ...

    Example:
        self.calculate_indicator(talib.MAMA, "close", fastlimit=0.5, slowlimit=0.05)
    """
    try:
        result = func(self.df[source], **kwargs)
        func_name = str(func.__name__)
        if isinstance(result, tuple):
            # multi-output indicator: one column per returned series
            for i, item in enumerate(result):
                self.df[f"{func_name}{i}"] = item
        else:
            self.df[func_name] = result
    except Exception as e:
        # previously a failure in func() left `result` unbound and a second
        # try block then raised a spurious NameError; a single block fixes that
        log_exception(logger, e)
def parse_order_update_by_id(message):
    """Split a Kraken openOrders update into new orders vs status-only changes.

    Args:
        message: websocket message; ``message[0]`` is a list of single-entry
            dicts mapping order id -> order info.

    Returns:
        dict with two keys:
            "insert": {order_id: parsed order} for entries carrying full
                order info (more than just a status field)
            "update": {order_id: {"ordStatus": ...}} for status-only changes
        or None if parsing failed (the error is logged).
    """
    try:
        # entries with more than one field describe a brand new order
        # (was `list(value.keys()) > 1`, a list-vs-int TypeError)
        new_orders = {
            key: parse_single_order(key, value)
            for order_dict in message[0]
            for key, value in order_dict.items()
            if len(value) > 1
        }
        # entries carrying only a "status" field are pure status transitions
        # (was comparing a list to the string "status", which is always False,
        # with the comprehension clauses in an order that raised NameError)
        status_changes = {
            key: {"ordStatus": MAP_ORDER_STATUS[value["status"]]}
            for order_dict in message[0]
            for key, value in order_dict.items()
            if list(value.keys()) == ["status"]
        }
        return {"insert": new_orders, "update": status_changes}
    except Exception as e:
        log_exception(logger, e)
def _load_all_env_keys(self): """Loads all keys from env file into cls.env_keys_dq Notes: In .env file, keys should contain : API Key : <exchange-name> & "key" API Secret : <exchange-name> & "secret" """ try: env_list_of_tuples = [] env_dict = list(dict(os.environ).items()) for k, v in env_dict: if self.exchange.upper() in k: if "KEY" in k: tuple_api_key = v env_secret_key = k.replace("KEY", "SECRET") tuple_secret_key = os.environ[env_secret_key] env_list_of_tuples.append( (tuple_api_key, tuple_secret_key)) self._set_class_var( deque(env_list_of_tuples, maxlen=len(env_list_of_tuples))) except Exception as e: log_exception(logger, e)
async def public(self, pairs, timeframe, depth, feed):
    """Build the subscription payload for a public websocket feed.

    Args:
        pairs: list of normalized pairs, e.g. ["XBT-USD"].
        timeframe: candle interval, only used for the "ohlc" feed.
        depth: book depth, only used for the "orderbook" feed.
        feed: one of "orderbook", "instrument", "trade", "ohlc", "spread".

    Returns:
        dict ready to be JSON-serialized and sent over the websocket.

    Raises:
        Re-raises any failure (e.g. unknown feed name) after logging it.
    """
    map_to_exchange = {
        "orderbook": "book",
        "instrument": "ticker",
        "trade": "trade",
        "ohlc": "ohlc",
        "spread": "spread"
    }
    try:
        data = {
            "event": "subscribe",
            # exchange expects XBT/USD; we normalize as XBT-USD internally
            "pair": [pair.replace("-", "/") for pair in pairs],
            "subscription": {
                "name": map_to_exchange[feed]
            }
        }
        if feed == "ohlc":
            data["subscription"]["interval"] = timeframe
        # bug fix: `feed` holds the pre-mapping name ("orderbook"), but the
        # old check compared against the mapped value "book", so the depth
        # parameter was never attached to book subscriptions
        if feed == "orderbook":
            data["subscription"]["depth"] = depth
        return data
    except Exception as e:
        log_exception(logger, e)
        raise
async def close(self):
    """Close the websocket connection, logging (console + db) any failure."""
    try:
        # await self.ws.wait_closed()
        await self.ws.close()
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
async def _handle_response_errors(self, response, endpoint, data) -> ErrorHandlerResult:
    """Run the exchange-specific error parser over a raw REST response.

    Args:
        response: raw response payload from the exchange.
        endpoint: endpoint the request was sent to (for error context).
        data: request payload (for error context).

    Returns:
        ErrorHandlerResult: OKResult on success, ErrorResult otherwise.
        When the result is not ok and carries a ``sleep`` value, we sleep
        that long before returning (rate-limit backoff).
    """
    try:
        result = self.response_parser.handle_errors(response, endpoint, data)
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
        # previously execution fell through with `result` unbound and the
        # isinstance check below raised NameError; fail explicitly instead
        return ErrorResult(accept=True, value=str(e))

    if not isinstance(result, ErrorHandlerResult):
        error_msg = "Invalid Type: Result needs to be ErrorResult or OKResult"
        logger.error(error_msg)
        return ErrorResult(accept=True, value=error_msg)

    if not result.is_ok:
        # result.value holds one of our custom error classes here
        logger.error(result.value)
        if result.sleep:
            await asyncio.sleep(result.sleep)
    return result
def parse_single_trade(info, pair):
    """Parse one item of a Kraken public trade message into our trade schema.

    Args:
        info: list [price, volume, time, side, orderType, misc] (see format below).
        pair: exchange pair name, e.g. "XBT/USD".

    Returns:
        dict in our normalized trade format, or None if parsing failed
        (the error is logged).
    """
    try:
        return {
            "trdMatchID": None,
            "orderID": None,
            "symbol": pair.replace("/", "-"),
            "side": "buy" if (info[3] == "b") else "sell",
            "ordType": "market" if (info[4] == "m") else "limit",
            "avgPx": info[0],
            "cumQty": info[1],
            "grossTradeAmt": Decimal(info[0]) * Decimal(info[1]),
            # exchange sends seconds since epoch; we store nanoseconds
            "transactTime": Decimal(info[2]) * 10**9,
        }
    except Exception as e:
        # previously fell through to `return parsed_trade` with the name
        # unbound on failure (NameError); return None explicitly
        log_exception(logger, e)
        return None


# KRAKEN STREAM FORMAT (FROM DOC)
# channelID: integer ChannelID of pair-trade subscription
# Array array
#   Array array
#     price float Price
#     volume float Volume
#     time float Time, seconds since epoch
#     side string Triggering order side, buy/sell
#     orderType string Triggering order type market/limit
#     misc string Miscellaneous
# channelName: string Channel Name of subscription
# pair: string Asset pair
# EXAMPLE
# [
#   0,
#   [
#     [
#       "5541.20000",
#       "0.15850568",
#       "1534614057.321597",
#       "s",
#       "l",
#       ""
#     ],
#     [
#       "6060.00000",
#       "0.02455000",
#       "1534614057.324998",
#       "b",
#       "l",
#       ""
#     ]
#   ],
#   "trade",
#   "XBT/USD"
# ]
async def publish_data_trade(self, msg, redis_pool):
    """Parse a public trade message and publish each trade to redis.

    Args:
        msg: raw websocket trade message.
        redis_pool: redis connection used to publish the parsed trades.

    Returns:
        ErrorResponse if validation fails; otherwise None (publishing to
        redis is the useful side effect).
    """
    # public trades
    # no snapshots
    try:
        parsed = self.stream_parser.trade(msg)
        # should return a dict that we validate against the Trade Model
        validated = TradesList(data=parsed, last=None)
        value = validated.data
        resp = OKResponse(status_code=200, value=value)
        # resp.value is a list of pydantic Trade Models; dispatch each one
        # to the channel matching its symbol
        for item in resp.value:
            # logger.info(ujson.dumps(item.dict()))
            update_chan = f"ws:public:data:trade:update:{self.exchange}:{item.symbol}"
            await redis_pool.publish(update_chan, ujson.dumps(item.dict()))
    except ValidationError as e:
        logger.error(e)
        return ErrorResponse(status_code=404, value=str(e))
    except Exception as e:
        log_exception(logger, e)
async def publish_data_orderbook(self, msg, redis_pool):
    """Parse an orderbook message, merge it into the cached full book, and
    publish the result to redis.

    Snapshots replace the cached book; updates are merged into it via
    Counter addition.

    NOTE(review): Counter `+=` drops non-positive totals, which looks like
    it is relied on to remove emptied price levels -- confirm the parser
    emits volumes as signed deltas.

    Returns:
        ErrorResponse if validation fails; otherwise None.
    """
    try:
        # with current logic the parser needs to return a dict
        # that has bool values for keys is_snapshot and is_update
        parsed = self.stream_parser.orderbook(msg)
        # should return a dict that we validate against the OrderBook Model
        validated = OrderBook(**parsed)
        if validated.is_snapshot:
            # snapshot: replace the cached book entirely
            self.full_orderbook["asks"] = Counter(validated.asks)
            self.full_orderbook["bids"] = Counter(validated.bids)
            update_chan = f"ws:public:data:orderbook:snapshot:{self.exchange}:{validated.symbol}"
        else:
            # update: merge the delta into the cached book
            self.full_orderbook["asks"] += Counter(validated.asks)
            self.full_orderbook["bids"] += Counter(validated.bids)
            update_chan = f"ws:public:data:orderbook:update:{self.exchange}:{validated.symbol}"
        resp = OKResponse(status_code=200, value=self.full_orderbook)
        await redis_pool.publish(update_chan, ujson.dumps(resp.value))
    except ValidationError as e:
        logger.error(e)
        return ErrorResponse(status_code=404, value=str(e))
    except Exception as e:
        log_exception(logger, e)
async def publish_data_trade(self, msg, redis_pool):
    """Parse a private trade message and publish each trade to redis.

    Falls back to the user_trade parser when the message does not have the
    public-trade shape (KeyError), resetting the feed counter.

    Args:
        msg: raw websocket trade message.
        redis_pool: redis connection used to publish the parsed trades.

    Returns:
        ErrorResponse if validation fails; otherwise None.
    """
    # public trades
    # ignore snapshot as it will only give us past 50 trades (useless)
    try:
        try:
            self.feed_counters["trade"] += 1
            parsed = self.stream_parser.trade(msg)
        except KeyError:
            # message is a user trade, not a public trade
            self.feed_counters["trade"] = 0
            parsed = self.stream_parser.user_trade(msg)
        # should return a dict that we validate against the Trade Model
        validated = TradesList(data=parsed)
        value = validated.data
        resp = OKResponse(status_code=200, value=value)
        # resp.value is a list of pydantic Trade Models; dispatch each one
        # to the channel matching its symbol
        for item in resp.value:
            logger.info(item)
            update_chan = f"ws:private:data:trade:update:{self.exchange}:{item.symbol}"
            await redis_pool.publish(update_chan, ujson.dumps(item.dict()))
    except ValidationError as e:
        logger.error(e)
        await log_exc_to_db(logger, e)
        return ErrorResponse(status_code=404, value=str(e))
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
def parse_add_order(validated_data, token):
    """Translate a validated order model into a Kraken "addOrder" payload.

    Args:
        validated_data: validated order model (ordType, side, symbol,
            price, stopPx, orderQty, marginRatio, effectiveTime,
            expireTime, clOrdID).
        token: websocket authentication token.

    Returns:
        dict payload for the "addOrder" websocket event, or None if the
        translation failed (the error is logged).

    Notes:
        The exchange expects the literal string "null" for unset fields.
    """
    try:
        return {
            "event": "addOrder",
            "token": token,
            "reqid": "null",
            # check again, reqid is of type int while clOrdID is str
            "ordertype": validated_data.ordType,
            "type": validated_data.side,
            "pair": validated_data.symbol.replace("-", "/"),
            "price": validated_data.price,
            "price2": validated_data.stopPx if validated_data.stopPx else "null",
            "volume": validated_data.orderQty,
            # marginRatio == 1 means no leverage
            "leverage": "null" if validated_data.marginRatio == 1 else int(1/validated_data.marginRatio),
            "oflags": "null",
            "starttm": validated_data.effectiveTime if validated_data.effectiveTime else "null",
            "expiretm": validated_data.expireTime if validated_data.expireTime else "null",
            "userref": validated_data.clOrdID if validated_data.clOrdID else "null",
            "validate": "false",
        }
    except Exception as e:
        # previously fell through to `return parsed_add_order` with the name
        # unbound on failure (NameError); return None explicitly
        log_exception(logger, e)
        return None
async def setup_strats(self):
    """Register every strategy to the DB, open its websocket, set up its
    execution models, and queue its main loop onto ``self.tasks``.
    """
    for strat in self.strats:
        try:
            await strat.register_to_db()
            await strat.subscribe_to_ws()
            logger.debug(strat.ws)
            logger.debug(strat.ws_token)
            logger.info(f"Strategy : {strat.name} --- Running")
            logger.info(f"Arguments : {strat.exchange} - {strat.symbol} - {strat.timeframe}")
        except Exception as e:
            log_exception(logger, e)
        if strat.execution_models:
            for _key, model in strat.execution_models.items():
                try:
                    await model.setup()
                    # execution models bring their own redis listener tasks
                    self.tasks.extend(model.redis_tasks)
                    logger.debug(model.ws)
                    logger.debug(model.ws_token)
                except Exception as e:
                    log_exception(logger, e)
        # coroutine is scheduled later when self.tasks is gathered
        self.tasks.append(strat.main_loop())
def add_short_order(self, total_vol):
    """Place a sell order for ``total_vol``, rounded to the number of
    decimal places the exchange accepts.

    Args:
        total_vol: total volume to sell.
    """
    # be careful that decimal places is within tolerance of exchange API
    try:
        orderQty = round(total_vol, self.volume_decimals)
    except Exception as e:
        log_exception(logger, e)
        # previously fell through and raised NameError on the unbound
        # orderQty; abort the order instead
        return
    self.add_order(orderQty, side="sell")
async def update_user_trades(exchange, message):
    """Insert a user trade received over redis into the Trade DB table.

    Args:
        exchange: exchange name, used to resolve the exchange id.
        message: raw redis message (utf-8 encoded json), or None.

    Notes:
        The trade is only inserted when its orderID already exists in the
        Order table (foreign-key constraint).
    """
    try:
        if message is None:
            return
        msg = message.decode("utf-8")
        new_trade = ujson.loads(msg)
        exchange_id = settings.EXCHANGE_IDS_FROM_NAME[exchange]
        # deprecated: doesnt return a TradesList anymore but single Trade Models
        # for model in new_trade:
        #     await Trade.create(**model, exchange=exchange_id)
        # ==> new order is now a single Trade model
        #!!! we need to account for foreign keys and suffix the key with _id
        new_trade["orderID_id"] = new_trade.pop("orderID")
        # check that orderID we pass already exists in order table
        orderID_queryset = await Order.filter(orderID=new_trade["orderID_id"]
                                              ).values()
        logger.info(orderID_queryset)
        if orderID_queryset:
            await Trade.create(**new_trade, exchange_id=exchange_id)
        else:
            logger.info(
                f"No entry for trade {new_trade['trdMatchID']} - order {new_trade['orderID_id']}"
            )
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
async def consume_private(self, exchange):
    """Consume the private websocket feed for one exchange, reconnecting
    with exponential backoff on connection errors.

    Args:
        exchange: name of the exchange whose private feed we read.

    Notes:
        Retries until ``self.retries`` is exceeded; ``self.retries == -1``
        means retry forever. Setting ``self.terminate`` stops consumption.
    """
    retries = 0
    delay = 1
    while retries <= self.retries or self.retries == -1:
        try:
            async for msg in self.private_feed_readers[exchange].ws:
                if self.terminate:
                    # bug fix: `break` only exited the async-for, after which
                    # the while loop immediately resumed consuming; return so
                    # termination actually stops this consumer
                    return
                private_fr = self.private_feed_readers[exchange]
                await private_fr.msg_handler(msg, self.redis_pool)
        except CancelledError:
            return
        except (ConnectionClosed, ConnectionAbortedError,
                ConnectionResetError, socket_error) as e:
            # transient network failure: back off, reconnect, retry
            log_exception(logger, e)
            await asyncio.sleep(delay)
            await self.connect_private(exchange)
            retries += 1
            delay *= 2
        except Exception as e:
            log_exception(logger, e)
            await asyncio.sleep(delay)
            retries += 1
            delay *= 2
async def startup_exchange_table():
    '''Seed the Exchange DB table from settings.EXCHANGE_IDS_FROM_NAME.

    For each configured exchange, insert a row if none exists yet.
    \t ==> we will need to define which exchanges need to be passed somehow
    and what their ids should be
    '''
    logger.warning(f"Exchange DBTable is empty")
    for exchange_name, exchange_id in settings.EXCHANGE_IDS_FROM_NAME.items():
        try:
            values = await Exchange.filter(name=exchange_name).values()
            logger.info(f"Exchanges : {values}")
        except Exception as e:
            log_exception(logger, e)
            await log_exc_to_db(logger, e)
            # previously `values` stayed unbound here and the check below
            # raised NameError; skip this exchange instead
            continue
        if not values:
            try:
                await Exchange.create(id=exchange_id, name=exchange_name)
            except Exception as e:
                log_exception(logger, e)
                await log_exc_to_db(logger, e)
                # previously the "Added ..." log fired even when create failed
                continue
            logger.warning(
                f"Added {exchange_name.upper()} to Exchange DBTable -- Exchange ID:{exchange_id}"
            )
    logger.warning("Exchange DBTable instantiated")
async def close_ws(self):
    """Close the websocket connection, logging any failure."""
    try:
        # await self.ws.wait_closed()
        await self.ws.close()
    except Exception as e:
        log_exception(logger, e)
def parse_user_trade(message):
    """Parse a user-trades websocket message into a list of parsed trades.

    ``message[0]`` is a list of single-entry dicts mapping trade id -> info.

    Returns:
        list of parsed trades, or None if parsing failed (error is logged).
    """
    try:
        parsed_trades = []
        for trade_dict in message[0]:
            for trade_id, trade_info in trade_dict.items():
                parsed_trades.append(parse_single_trade(trade_id, trade_info))
        return parsed_trades
    except Exception as e:
        log_exception(logger, e)
def parse_order_snapshot_by_id(message):
    """Parse an openOrders snapshot into {order_id: parsed order}.

    ``message[0]`` is a list of single-entry dicts mapping order id -> info.

    Returns:
        dict keyed by order id, or None if parsing failed (error is logged).
    """
    try:
        snapshot = {}
        for order_dict in message[0]:
            for order_id, order_info in order_dict.items():
                snapshot[order_id] = parse_single_order(order_id, order_info)
        return snapshot
    except Exception as e:
        log_exception(logger, e)
def run_server(host, port, auto_reload):
    """Run the noobit main server app, swallowing Ctrl-C.

    Args:
        host: interface to bind.
        port: port to bind.
        auto_reload: enable reload-on-code-change (dev mode).
    """
    try:
        main_server.run("noobit.server.main_app:app",
                        host=host,
                        port=port,
                        reload=auto_reload)
    except KeyboardInterrupt:
        pass
    except Exception as e:
        log_exception(logger, e)
def run(self):
    """Run the strategy process end to end: set up Tortoise ORM and the
    strategies, drive the main loop until interrupted, then cancel all
    tasks, close DB connections, and close the event loop.

    Uses a fresh uvloop event loop bound to this process.
    """
    process_id = os.getpid()
    logger.info(f"Starting process {process_id}")

    loop = uvloop.new_event_loop()
    asyncio.set_event_loop(loop)
    try:
        loop.run_until_complete(self.init_tortoise())
        loop.run_until_complete(self.setup_strats())
    except Exception as e:
        log_exception(logger, e)

    try:
        loop.run_until_complete(self.main())
    except KeyboardInterrupt:
        self.shutdown_strats()
        print("Keyboard Interrupt")
    finally:
        # NOTE(review): re-fetches the loop; presumably the same object as
        # `loop` above since we called set_event_loop -- confirm
        loop = asyncio.get_event_loop()
        tasks = asyncio.all_tasks(loop)
        logger.info("Initiating shutdown")
        for task in tasks:
            task.cancel()
            try:
                # let each task process its cancellation before moving on
                loop.run_until_complete(task)
            except asyncio.CancelledError:
                logger.info(f'{task} is now cancelled')
        logger.info("Closing Db connections")
        loop.run_until_complete(self.shutdown_tortoise())
        logger.info("Stopping Event Loop")
        loop.stop()
        logger.info("Closing Event Loop")
        loop.close()


# if __name__ == "__main__":
#     strategy = "mock_strat"
#     strat_dir_path = "noobit_user.strategies"
#     strat_file_path = f"{strat_dir_path}.{strategy}"
#     strategy = import_module(strat_file_path)
#     strat = strategy.Strategy(exchange="kraken",
#                               pair="XBT-USD",
#                               timeframe=60,
#                               volume=0
#                               )
#     runner = BackTestRunner(strats=[strat])
#     runner.run()
def parse_trades_to_list(message):
    """Parse a public trade message into a list of normalized trades.

    Args:
        message: [channelID, [trade, ...], channelName, pair].

    Returns:
        non-empty list of parsed trades, or None when there were no trades
        or parsing failed (the error is logged).
    """
    pair = message[3]
    try:
        parsed_trades = [parse_single_trade(info, pair) for info in message[1]]
    except Exception as e:
        log_exception(logger, e)
        # previously `parsed_trades` stayed unbound here and the check
        # below raised NameError
        return None
    if parsed_trades:
        return parsed_trades
def run_feedhandler(exchanges, symbols, private_feeds, public_feeds):
    """Build a FeedHandler for the given exchanges/feeds and run it,
    swallowing Ctrl-C.

    Args:
        exchanges: exchanges to connect to.
        symbols: symbols to subscribe; upper-cased before being passed on.
        private_feeds: private feed names to subscribe.
        public_feeds: public feed names to subscribe.
    """
    try:
        fh = FeedHandler(exchanges=exchanges,
                         private_feeds=private_feeds,
                         public_feeds=public_feeds,
                         pairs=[symbol.upper() for symbol in symbols])
        fh.run()
    except KeyboardInterrupt:
        pass
    except Exception as e:
        log_exception(logger, e)
async def update_public_trades(exchange, message):
    """Decode a public trade message from redis and log it.

    NOTE(review): currently only logs the decoded trade; no DB write yet.

    Args:
        exchange: exchange name (currently unused).
        message: raw redis message (utf-8 encoded json), or None.
    """
    try:
        if message is None:
            return
        msg = message.decode("utf-8")
        new_trade = ujson.loads(msg)
        logger.info(new_trade)
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
async def update_public_orderbook(exchange, message):
    """Decode a public orderbook message from redis.

    NOTE(review): the decoded book is currently unused; this looks like a
    placeholder for a future DB update.

    Args:
        exchange: exchange name (currently unused).
        message: raw redis message (utf-8 encoded json), or None.
    """
    try:
        if message is None:
            return
        msg = message.decode("utf-8")
        new_book = ujson.loads(msg)
        # logging.info(new_book)
    except Exception as e:
        log_exception(logger, e)
        await log_exc_to_db(logger, e)
async def record_new_account_update(event: str):
    """Insert a new balance record into the database for every exchange.

    Args:
        event (str): event that triggered the update (periodic, trade ?)

    Notes:
        TODO: also push the fetched values to the redis cache so REST
        consumers can read them without hitting the DB.
        Values are json-serialized before insertion to sidestep the
        datetime-serialization issue (timestamps are kept numeric).
    """
    for exchange_name, exchange_id in settings.EXCHANGE_IDS_FROM_NAME.items():
        #! update later
        api_key = f"{exchange_name}"
        api = rest_api_map[api_key]()

        # bug fix: on any failed fetch we previously `return`ed, silently
        # dropping every remaining exchange; skip just this one instead
        balances = await api.get_balances()
        if not balances.is_ok:
            continue
        exposure = await api.get_exposure()
        if not exposure.is_ok:
            continue
        open_positions = await api.get_open_positions(mode="by_id")
        if not open_positions.is_ok:
            continue

        try:
            await Account.create(event=event,
                                 # NOTE(review): exchange_id is json-dumped here but
                                 # passed raw as exchange_id elsewhere in this file --
                                 # confirm the column type before changing
                                 exchange_id=ujson.dumps(exchange_id),
                                 balances=ujson.dumps(balances.value),
                                 exposure=ujson.dumps(exposure.value),
                                 open_positions=ujson.dumps(
                                     open_positions.value))
        except Exception as e:
            log_exception(logger, e)
            await log_exc_to_db(logger, e)
            # bug fix: only log success below when the insert actually succeeded
            continue
        logger.warning(
            f"Balance : New event {event} for exchange {exchange_name} - db record inserted"
        )
def parse_spread(message):
    """Parse a Kraken spread message into our normalized spread format.

    Args:
        message: [channelID, [bid, ask, time], channelName, pair].

    Returns:
        dict with symbol / bestBid / bestAsk / utcTime (nanoseconds), or
        None if parsing failed (the error is logged).
    """
    try:
        return {
            "symbol": message[3].replace("/", "-"),
            "bestBid": message[1][0],
            "bestAsk": message[1][1],
            # exchange sends seconds since epoch; we store nanoseconds
            "utcTime": Decimal(message[1][2]) * 10**9
        }
    except Exception as e:
        # previously fell through to `return parsed` with the name unbound
        # on failure (NameError); return None explicitly
        log_exception(logger, e)
        return None
async def print_state(self):
    """Continuously read the shared state and log our symbol's spread --
    a smoke test that reading from state works.
    """
    try:
        async for current_state in self.state:
            if self.should_exit:
                break
            else:
                state = current_state[self.symbol]
                logger.info(state["spread"])
            # prevent blocking
            await asyncio.sleep(0)
    except Exception as e:
        log_exception(logger, e)
def parse_cancel_order(validated_data: CancelOrder, token: str):
    """Translate a validated CancelOrder model into a Kraken "cancelOrder" payload.

    Args:
        validated_data: validated cancel-order model (clOrdID, orderID).
        token: websocket authentication token.

    Returns:
        dict payload for the "cancelOrder" websocket event, or None if the
        translation failed (the error is logged).
    """
    try:
        return {
            "event": "cancelOrder",
            "token": token,
            # exchange expects the literal string "null" for unset fields
            "reqid": validated_data.clOrdID if validated_data.clOrdID else "null",
            "txid": validated_data.orderID
        }
    except Exception as e:
        # previously fell through to `return parsed` with the name unbound
        # on failure (NameError); return None explicitly
        log_exception(logger, e)
        return None