async def fetch_ticks(self, from_id: int = None, do_print: bool = True): params = {'symbol': self.symbol, 'limit': 1000} if from_id is not None: params['from'] = max(0, from_id) try: ticks = await self.public_get(self.endpoints['ticks'], params) except Exception as e: print('error fetching ticks', e) return [] try: trades = [{ 'trade_id': int(tick['id']), 'price': float(tick['price']), 'qty': float(tick['qty']), 'timestamp': date_to_ts(tick['time']), 'is_buyer_maker': tick['side'] == 'Sell' } for tick in ticks['result']] if do_print: print_([ 'fetched trades', self.symbol, trades[0]['trade_id'], ts_to_date(float(trades[0]['timestamp']) / 1000) ]) except: trades = [] if do_print: print_(['fetched no new trades', self.symbol]) return trades
async def find_df_enclosing_timestamp(self, timestamp, guessed_chunk=None): if guessed_chunk is not None: if guessed_chunk[0]["timestamp"] < timestamp < guessed_chunk[-1][ "timestamp"]: print_(["found id"]) return self.transform_ticks(guessed_chunk) else: guessed_chunk = sorted(await self.bot.fetch_ticks(do_print=False), key=lambda x: x["trade_id"]) return await self.find_df_enclosing_timestamp( timestamp, guessed_chunk) if timestamp < guessed_chunk[0]["timestamp"]: guessed_id = guessed_chunk[0]["trade_id"] - len(guessed_chunk) * ( guessed_chunk[0]["timestamp"] - timestamp) / (guessed_chunk[-1]["timestamp"] - guessed_chunk[0]["timestamp"]) else: guessed_id = guessed_chunk[-1]["trade_id"] + len(guessed_chunk) * ( timestamp - guessed_chunk[-1]["timestamp"]) / ( guessed_chunk[-1]["timestamp"] - guessed_chunk[0]["timestamp"]) guessed_id = int(guessed_id - len(guessed_chunk) / 2) guessed_chunk = sorted( await self.bot.fetch_ticks(guessed_id, do_print=False), key=lambda x: x["trade_id"], ) print_([ f"guessed_id {guessed_id} earliest ts {ts_to_date(guessed_chunk[0]['timestamp'] / 1000)[:19]} last ts {ts_to_date(guessed_chunk[-1]['timestamp'] / 1000)[:19]} target ts {ts_to_date(timestamp / 1000)[:19]}" ]) return await self.find_df_enclosing_timestamp(timestamp, guessed_chunk)
async def get_csv_gz(self, base_url, symbol, date, df_for_id_matching): """ Fetches a full day of trades from the Bybit repository. @param symbol: Symbol to fetch. @param date: Day to download. @return: Dataframe with full day. """ print_(["Fetching", symbol, date]) url = f"{base_url}{symbol.upper()}/{symbol.upper()}{date}.csv.gz" df = pd.DataFrame(columns=[ "trade_id", "price", "qty", "timestamp", "is_buyer_maker" ]) try: resp = urlopen(url) with gzip.open(BytesIO(resp.read())) as f: ff = pd.read_csv(f) trade_ids = np.zeros(len(ff)).astype(np.int64) tf = pd.DataFrame({ "trade_id": trade_ids, "price": ff.price.astype(np.float64), "qty": ff["size"].astype(np.float64), "timestamp": (ff.timestamp * 1000).astype(np.int64), "is_buyer_maker": (ff.side == "Sell").astype(np.int8), }) tf["trade_id"] = self.deduce_trade_ids(tf, df_for_id_matching) tf.sort_values("timestamp", inplace=True) tf.reset_index(drop=True, inplace=True) del ff df = tf except Exception as e: print("Failed to fetch", date, e) return df
async def fetch_ticks(self, from_id: int = None, do_print: bool = True): params = {"symbol": self.symbol, "limit": 1000} if from_id is not None: params["from"] = max(0, from_id) try: ticks = await self.public_get(self.endpoints["ticks"], params) except Exception as e: print("error fetching ticks", e) return [] try: trades = [{ "trade_id": int(tick["id"]), "price": float(tick["price"]), "qty": float(tick["qty"]), "timestamp": date_to_ts(tick["time"]), "is_buyer_maker": tick["side"] == "Sell", } for tick in ticks["result"]] if do_print: print_([ "fetched trades", self.symbol, trades[0]["trade_id"], ts_to_date(float(trades[0]["timestamp"]) / 1000), ]) except: trades = [] if do_print: print_(["fetched no new trades", self.symbol]) return trades
async def get_all_income(self, symbol: str = None, start_time: int = None, income_type: str = 'Trade', end_time: int = None): limit = 50 income = [] page = 1 while True: fetched = await self.fetch_income(symbol=symbol, start_time=start_time, income_type=income_type, limit=limit, page=page) if len(fetched) == 0: break print_(['fetched income', ts_to_date(fetched[0]['timestamp'])]) if fetched == income[-len(fetched):]: break income += fetched if len(fetched) < limit: break page += 1 income_d = {e['transaction_id']: e for e in income} return sorted(income_d.values(), key=lambda x: x['timestamp'])
async def fetch_ticks(self, from_id: int = None, start_time: int = None, end_time: int = None, do_print: bool = True): params = {'symbol': self.symbol, 'limit': 1000} if from_id is not None: params['fromId'] = max(0, from_id) if start_time is not None: params['startTime'] = start_time if end_time is not None: params['endTime'] = end_time try: fetched = await self.public_get(self.endpoints['ticks'], params) except Exception as e: print('error fetching ticks a', e) return [] try: ticks = [{'trade_id': int(t['a']), 'price': float(t['p']), 'qty': float(t['q']), 'timestamp': int(t['T']), 'is_buyer_maker': t['m']} for t in fetched] if do_print: print_(['fetched ticks', self.symbol, ticks[0]['trade_id'], ts_to_date(float(ticks[0]['timestamp']) / 1000)]) except Exception as e: print('error fetching ticks b', e, fetched) ticks = [] if do_print: print_(['fetched no new ticks', self.symbol]) return ticks
def get_zip(self, base_url, symbol, date): """ Fetches a full day of trades from the Binance repository. @param symbol: Symbol to fetch. @param date: Day to download. @return: Dataframe with full day. """ print_(["Fetching", symbol, date]) url = "{}{}/{}-aggTrades-{}.zip".format(base_url, symbol.upper(), symbol.upper(), date) df = pd.DataFrame(columns=[ "trade_id", "price", "qty", "timestamp", "is_buyer_maker" ]) column_names = [ "trade_id", "price", "qty", "first", "last", "timestamp", "is_buyer_maker", ] if self.spot: column_names.append("best_match") try: resp = urlopen(url) file_tmp = BytesIO() with tqdm.wrapattr(open(os.devnull, "wb"), "write", miniters=1, total=getattr(resp, "length", None)) as fout: for chunk in resp: fout.write(chunk) file_tmp.write(chunk) with ZipFile(file_tmp) as my_zip_file: for contained_file in my_zip_file.namelist(): tf = pd.read_csv(my_zip_file.open(contained_file), names=column_names) tf.drop( errors="ignore", columns=["first", "last", "best_match"], inplace=True, ) tf["trade_id"] = tf["trade_id"].astype(np.int64) tf["price"] = tf["price"].astype(np.float64) tf["qty"] = tf["qty"].astype(np.float64) tf["timestamp"] = tf["timestamp"].astype(np.int64) tf["is_buyer_maker"] = tf["is_buyer_maker"].astype(np.int8) tf.sort_values("trade_id", inplace=True) tf.drop_duplicates("trade_id", inplace=True) tf.reset_index(drop=True, inplace=True) if df.empty: df = tf else: df = pd.concat([df, tf]) except Exception as e: print("Failed to fetch", date, e) return df
async def init_user_stream(self) -> None: try: response = await self.private_post(self.endpoints['listen_key']) self.listen_key = response['listenKey'] self.endpoints['websocket_user'] = self.endpoints['websocket'] + self.listen_key except Exception as e: traceback.print_exc() print_(['error fetching listen key', e])
async def beat_heart_user_stream(self) -> None: while True: await asyncio.sleep(27) try: await self.ws.send(json.dumps({'op': 'ping'})) except Exception as e: traceback.print_exc() print_(['error sending heartbeat', e])
async def init_user_stream(self) -> None: try: response = await self.private_post(self.endpoints["listen_key"]) self.listen_key = response["listenKey"] self.endpoints["websocket_user"] = self.endpoints[ "websocket"] + self.listen_key except Exception as e: traceback.print_exc() print_(["error fetching listen key", e])
def update_output_information(self): self.ts_released['print'] = time() line = f"{self.symbol} " if self.position['long']['size'] != 0.0: line += f"l {self.position['long']['size']} @ " line += f"{round_(self.position['long']['price'], self.price_step)}, " long_closes = sorted([ o for o in self.open_orders if o['side'] == 'sell' and o['position_side'] == 'long' ], key=lambda x: x['price']) long_entries = sorted([ o for o in self.open_orders if o['side'] == 'buy' and o['position_side'] == 'long' ], key=lambda x: x['price']) leqty, leprice = (long_entries[-1]['qty'], long_entries[-1]['price']) if long_entries else ( 0.0, 0.0) lcqty, lcprice = (long_closes[0]['qty'], long_closes[0]['price']) if long_closes else ( 0.0, 0.0) line += f"e {leqty} @ {leprice}, c {lcqty} @ {lcprice} " if self.position['shrt']['size'] != 0.0: shrt_closes = sorted([ o for o in self.open_orders if o['side'] == 'buy' and o['position_side'] == 'shrt' ], key=lambda x: x['price']) shrt_entries = sorted([ o for o in self.open_orders if o['side'] == 'sell' and o['position_side'] == 'shrt' ], key=lambda x: x['price']) seqty, seprice = (shrt_entries[0]['qty'], shrt_entries[0]['price']) if shrt_entries else ( 0.0, 0.0) scqty, scprice = (shrt_closes[-1]['qty'], shrt_closes[-1]['price']) if shrt_closes else ( 0.0, 0.0) line += f"s {self.position['shrt']['size']} @ " line += f"{round_(self.position['shrt']['price'], self.price_step)}, " line += f"e {seqty} @ {seprice}, c {scqty} @ {scprice} " if self.position['long']['size'] > abs(self.position['shrt']['size']): liq_price = self.position['long']['liquidation_price'] else: liq_price = self.position['shrt']['liquidation_price'] line += f"|| last {self.price} " line += f"lpprc diff {calc_diff(self.position['long']['price'], self.price):.3f} " line += f"spprc diff {calc_diff(self.position['shrt']['price'], self.price):.3f} " line += f"liq {round_dynamic(liq_price, 5)} " line += f"lpbr {self.position['long']['pbr']:.3f} " line += f"spbr 
{self.position['shrt']['pbr']:.3f} " line += f"bal {round_dynamic(self.position['wallet_balance'], 5)} " line += f"eq {round_dynamic(self.position['equity'], 5)} " print_([line], r=True)
async def get_all_income(self, symbol: str = None, start_time: int = None, income_type: str = 'realized_pnl', end_time: int = None): income = [] while True: fetched = await self.fetch_income(symbol=symbol, start_time=start_time, income_type=income_type, limit=1000) print_(['fetched income', ts_to_date(fetched[0]['timestamp'])]) if fetched == income[-len(fetched):]: break income += fetched if len(fetched) < 1000: break start_time = income[-1]['timestamp'] income_d = {e['transaction_id']: e for e in income} return sorted(income_d.values(), key=lambda x: x['timestamp'])
async def get_sampled_ticks(self) -> np.ndarray: """ Function for direct use in the backtester. Checks if the numpy arrays exist and if so loads them. If they do not exist or if their length doesn't match, download the missing data and create them. @return: numpy array. """ if os.path.exists(self.tick_filepath): print_(["Loading cached tick data from", self.tick_filepath]) tick_data = np.load(self.tick_filepath) return tick_data await self.download_ticks() await self.prepare_files() tick_data = np.load(self.tick_filepath) return tick_data
async def cancel_orders(self, orders_to_cancel: [dict]) -> [dict]: if not orders_to_cancel: return if self.ts_locked['cancel_orders'] > self.ts_released['cancel_orders']: return self.ts_locked['cancel_orders'] = time() try: deletions = [] for oc in orders_to_cancel: try: deletions.append((oc, asyncio.create_task(self.execute_cancellation(oc)))) except Exception as e: print_(['error cancelling order a', oc, e]) cancelled_orders = [] for oc, c in deletions: try: o = await c cancelled_orders.append(o) if 'order_id' in o: print_(['cancelled order', o['symbol'], o['side'], o['position_side'], o['qty'], o['price']], n=True) self.open_orders = [oo for oo in self.open_orders if oo['order_id'] != o['order_id']] else: print_(['error cancelling order', o], n=True) self.dump_log({'log_type': 'cancel_order', 'data': o}) except Exception as e: print_(['error cancelling order b', oc, c.exception(), e], n=True) self.dump_log({'log_type': 'cancel_order', 'data': {'result': str(c.exception()), 'error': repr(e), 'data': oc}}) return cancelled_orders finally: self.ts_released['cancel_orders'] = time()
async def create_orders(self, orders_to_create: [dict]) -> [dict]: if not orders_to_create: return [] if self.ts_locked['create_orders'] > self.ts_released['create_orders']: return [] self.ts_locked['create_orders'] = time() try: creations = [] for oc in sorted(orders_to_create, key=lambda x: calc_diff(x['price'], self.price)): try: creations.append((oc, asyncio.create_task(self.execute_order(oc)))) except Exception as e: print_(['error creating order a', oc, e], n=True) created_orders = [] for oc, c in creations: try: o = await c created_orders.append(o) if 'side' in o: print_([' created order', o['symbol'], o['side'], o['position_side'], o['qty'], o['price']], n=True) if o['order_id'] not in {x['order_id'] for x in self.open_orders}: self.open_orders.append(o) else: print_(['error creating order b', o, oc], n=True) self.dump_log({'log_type': 'create_order', 'data': o}) except Exception as e: print_(['error creating order c', oc, c.exception(), e], n=True) self.dump_log({'log_type': 'create_order', 'data': {'result': str(c.exception()), 'error': repr(e), 'data': oc}}) return created_orders finally: self.ts_released['create_orders'] = time()
async def start_websocket_user_stream(self) -> None: await self.init_user_stream() asyncio.create_task(self.beat_heart_user_stream()) print_(['url', self.endpoints['websocket_user']]) async with websockets.connect(self.endpoints['websocket_user']) as ws: self.ws = ws await self.subscribe_to_user_stream(ws) async for msg in ws: if msg is None: continue try: if self.stop_websocket: break asyncio.create_task(self.on_user_stream_events(self.standardize_user_stream_event(json.loads(msg)))) except Exception as e: print(['error in websocket user stream', e]) traceback.print_exc()
async def cancel_orders(self, orders_to_cancel: [dict]) -> [dict]: if not orders_to_cancel: return if self.ts_locked["cancel_orders"] > self.ts_released["cancel_orders"]: return self.ts_locked["cancel_orders"] = time() try: deletions = [] for oc in orders_to_cancel: try: deletions.append( (oc, asyncio.create_task(self.execute_cancellation(oc)))) except Exception as e: print_(["error cancelling order a", oc, e]) cancelled_orders = [] for oc, c in deletions: try: o = await c cancelled_orders.append(o) if "order_id" in o: print_( [ "cancelled order", o["symbol"], o["side"], o["position_side"], o["qty"], o["price"], ], n=True, ) self.open_orders = [ oo for oo in self.open_orders if oo["order_id"] != o["order_id"] ] else: print_(["error cancelling order", o], n=True) self.dump_log({"log_type": "cancel_order", "data": o}) except Exception as e: print_(["error cancelling order b", oc, c.exception(), e], n=True) self.dump_log({ "log_type": "cancel_order", "data": { "result": str(c.exception()), "error": repr(e), "data": oc, }, }) return cancelled_orders finally: self.ts_released["cancel_orders"] = time()
async def create_orders(self, orders_to_create: [dict]) -> [dict]: if not orders_to_create: return [] if self.ts_locked["create_orders"] > self.ts_released["create_orders"]: return [] self.ts_locked["create_orders"] = time() try: creations = [] for oc in sorted(orders_to_create, key=lambda x: calc_diff(x["price"], self.price)): try: creations.append( (oc, asyncio.create_task(self.execute_order(oc)))) except Exception as e: print_(["error creating order a", oc, e], n=True) created_orders = [] for oc, c in creations: try: o = await c created_orders.append(o) if "side" in o: print_( [ " created order", o["symbol"], o["side"], o["position_side"], o["qty"], o["price"], ], n=True, ) if o["order_id"] not in { x["order_id"] for x in self.open_orders }: self.open_orders.append(o) else: print_(["error creating order b", o, oc], n=True) self.dump_log({"log_type": "create_order", "data": o}) except Exception as e: print_(["error creating order c", oc, c.exception(), e], n=True) self.dump_log({ "log_type": "create_order", "data": { "result": str(c.exception()), "error": repr(e), "data": oc, }, }) return created_orders finally: self.ts_released["create_orders"] = time()
def read_dataframe(self, path: str) -> pd.DataFrame: """ Reads a dataframe with correct data types. @param path: The path to the dataframe. @return: The read dataframe. """ try: df = pd.read_csv(path, dtype={ "trade_id": np.int64, "price": np.float64, "qty": np.float64, "timestamp": np.int64, "is_buyer_maker": np.int8 }) except ValueError as e: df = pd.DataFrame() print_(['Error in reading dataframe', e]) return df
def get_zip(self, base_url, symbol, date): """ Fetches a full day of trades from the Binance repository. @param symbol: Symbol to fetch. @param date: Day to download. @return: Dataframe with full day. """ print_(['Fetching', symbol, date]) url = "{}{}/{}-aggTrades-{}.zip".format(base_url, symbol.upper(), symbol.upper(), date) df = pd.DataFrame(columns=[ 'trade_id', 'price', 'qty', 'timestamp', 'is_buyer_maker' ]) column_names = [ 'trade_id', 'price', 'qty', 'first', 'last', 'timestamp', 'is_buyer_maker' ] if self.spot: column_names.append('best_match') try: resp = urlopen(url) with ZipFile(BytesIO(resp.read())) as my_zip_file: for contained_file in my_zip_file.namelist(): tf = pd.read_csv(my_zip_file.open(contained_file), names=column_names) tf.drop(errors='ignore', columns=['first', 'last', 'best_match'], inplace=True) tf["trade_id"] = tf["trade_id"].astype(np.int64) tf["price"] = tf["price"].astype(np.float64) tf["qty"] = tf["qty"].astype(np.float64) tf["timestamp"] = tf["timestamp"].astype(np.int64) tf["is_buyer_maker"] = tf["is_buyer_maker"].astype(np.int8) tf.sort_values("trade_id", inplace=True) tf.drop_duplicates("trade_id", inplace=True) tf.reset_index(drop=True, inplace=True) if df.empty: df = tf else: df = pd.concat([df, tf]) except Exception as e: print('Failed to fetch', date, e) return df
async def get_all_fills(self, symbol: str = None, start_time: int = None): fills = [] i = 0 while True: i += 1 if i >= 15: print( "\nWarning: more than 15 calls to fetch_fills(), breaking") break fetched = await self.fetch_fills(symbol=symbol, start_time=start_time) print_(['fetched fills', ts_to_date(fetched[0]['timestamp'])]) if fetched == fills[-len(fetched):]: break fills += fetched if len(fetched) < 1000: break start_time = fills[-1]['timestamp'] fills_d = {e['id']: e for e in fills} return sorted(fills_d.values(), key=lambda x: x['timestamp'])
async def fetch_ticks( self, from_id: int = None, start_time: int = None, end_time: int = None, do_print: bool = True, ): params = {"symbol": self.symbol, "limit": 1000} if from_id is not None: params["fromId"] = max(0, from_id) if start_time is not None: params["startTime"] = int(start_time) if end_time is not None: params["endTime"] = int(end_time) try: fetched = await self.public_get(self.endpoints["ticks"], params) except Exception as e: print("error fetching ticks a", e) traceback.print_exc() return [] try: ticks = [{ "trade_id": int(t["a"]), "price": float(t["p"]), "qty": float(t["q"]), "timestamp": int(t["T"]), "is_buyer_maker": t["m"], } for t in fetched] if do_print: print_([ "fetched ticks", self.symbol, ticks[0]["trade_id"], ts_to_date(float(ticks[0]["timestamp"]) / 1000), ]) except Exception as e: print("error fetching ticks b", e, fetched) ticks = [] if do_print: print_(["fetched no new ticks", self.symbol]) return ticks
async def on_market_stream_event(self, ticks: [dict]): if ticks: for tick in ticks: if tick['is_buyer_maker']: self.ob[0] = tick['price'] else: self.ob[1] = tick['price'] self.price = ticks[-1]['price'] now = time() if now - self.ts_released['force_update'] > self.force_update_interval: self.ts_released['force_update'] = now # force update pos and open orders thru rest API every 30 sec await asyncio.gather(self.update_position(), self.update_open_orders()) if now - self.ts_released['print'] >= 0.5: self.update_output_information() if now - self.heartbeat_ts > 60 * 60: # print heartbeat once an hour print_(['heartbeat\n'], n=True) self.heartbeat_ts = time() await self.cancel_and_create()
def save_dataframe(self, df: pd.DataFrame, filename: str, missing: bool, verified: bool) -> str: """ Saves a processed dataframe. Creates the name based on first and last trade id and first and last timestamp. Deletes dataframes that are obsolete. For example, when gaps were filled. @param df: The dataframe to save. @param filename: The current name of the dataframe. @param missing: If the dataframe had gaps. @return: """ if verified: new_name = f'{df["trade_id"].iloc[0]}_{df["trade_id"].iloc[-1]}_{df["timestamp"].iloc[0]}_{df["timestamp"].iloc[-1]}_verified.csv' else: new_name = f'{df["trade_id"].iloc[0]}_{df["trade_id"].iloc[-1]}_{df["timestamp"].iloc[0]}_{df["timestamp"].iloc[-1]}.csv' if new_name != filename: print_([ "Saving file", new_name, ts_to_date(int(new_name.split("_")[2]) / 1000), ]) df.to_csv(os.path.join(self.filepath, new_name), index=False) new_name = "" try: os.remove(os.path.join(self.filepath, filename)) print_(["Removed file", filename]) except: pass elif missing: print_(["Replacing file", filename]) df.to_csv(os.path.join(self.filepath, filename), index=False) else: new_name = "" return new_name
async def init_exchange_config(self) -> bool: try: print_([ await self.private_post( self.endpoints["margin_type"], { "symbol": self.symbol, "marginType": "CROSSED" }, ) ]) except Exception as e: print(e) try: print_([await self.execute_leverage_change()]) except Exception as e: print(e) try: print_([ await self.private_post(self.endpoints["position_side"], {"dualSidePosition": "true"}) ]) except Exception as e: if '"code":-4059' not in e.args[0]: print(e) print("unable to set hedge mode, aborting") raise Exception("failed to set hedge mode")
async def find_time(self, start_time) -> pd.DataFrame: """ Finds the trades according to the time. Uses different approaches for exchanges depending if time based fetching is supported. If time based searching is supported, directly fetch the data. If time based searching is not supported, start with current trades and move closer to start time based on estimation. @param start_time: Time to look for. @return: Dataframe with first trade later or equal to start time. """ try: ticks = await self.bot.fetch_ticks_time(start_time) return self.transform_ticks(ticks) except: print_(["Finding id for start time..."]) ticks = await self.bot.fetch_ticks() df = self.transform_ticks(ticks) highest_id = df["trade_id"].iloc[-1] prev_div = [] first_ts = df["timestamp"].iloc[0] last_ts = df["timestamp"].iloc[-1] first_id = df["trade_id"].iloc[0] length = len(df) while not start_time >= first_ts or not start_time <= last_ts: loop_start = time() nw_id, prev_div, forward = self.new_id(first_ts, last_ts, first_id, length, start_time, prev_div) print_([ "Current time span from", df["timestamp"].iloc[0], "to", df["timestamp"].iloc[-1], "with earliest trade id", df["trade_id"].iloc[0], "estimating distance of", forward, "trades", ]) if nw_id > highest_id: nw_id = highest_id try: ticks = await self.bot.fetch_ticks(from_id=int(nw_id), do_print=False) df = self.transform_ticks(ticks) if not df.empty: first_ts = df["timestamp"].iloc[0] last_ts = df["timestamp"].iloc[-1] first_id = df["trade_id"].iloc[0] length = len(df) if nw_id == 1 and first_ts >= start_time: break except Exception: print("Failed to fetch or transform...") await asyncio.sleep( max(0.0, self.fetch_delay_seconds - time() + loop_start)) print_(["Found id for start time!"]) return df[df["timestamp"] >= start_time]
async def init_exchange_config(self) -> bool: try: print_([await self.private_post(self.endpoints['margin_type'], {'symbol': self.symbol, 'marginType': 'CROSSED'})]) except Exception as e: print(e) try: print_([await self.execute_leverage_change()]) except Exception as e: print(e) try: print_([await self.private_post(self.endpoints['position_side'], {'dualSidePosition': 'true'})]) except Exception as e: if '"code":-4059' not in e.args[0]: print(e) print('unable to set hedge mode, aborting') raise Exception('failed to set hedge mode')
async def cancel_and_create(self): if self.ts_locked["cancel_and_create"] > self.ts_released["cancel_and_create"]: return if any(self.error_halt.values()): print_([f'warning: error in rest api fetch {self.error_halt}, halting order creations/cancellations']) return self.ts_locked["cancel_and_create"] = time() try: to_cancel_, to_create_ = filter_orders( self.open_orders, self.calc_orders(), keys=["side", "position_side", "qty", "price"], ) to_cancel, to_create = [], [] for elm in to_cancel_: if elm["position_side"] == "long": if self.long_mode == "tp_only": if elm["side"] == "sell": to_cancel.append(elm) elif self.long_mode != "manual": to_cancel.append(elm) if elm["position_side"] == "shrt": if self.shrt_mode == "tp_only": if elm["side"] == "buy": to_cancel.append(elm) elif self.shrt_mode != "manual": to_cancel.append(elm) for elm in to_create_: if elm["position_side"] == "long": if self.long_mode == "tp_only": if elm["side"] == "sell": to_create.append(elm) elif self.long_mode != "manual": to_create.append(elm) if elm["position_side"] == "shrt": if self.shrt_mode == "tp_only": if elm["side"] == "buy": to_create.append(elm) elif self.shrt_mode != "manual": to_create.append(elm) to_cancel = sorted(to_cancel, key=lambda x: calc_diff(x["price"], self.price)) to_create = sorted(to_create, key=lambda x: calc_diff(x["price"], self.price)) results = [] if to_cancel: # to avoid building backlog, cancel n+1 orders, create n orders results.append( asyncio.create_task( self.cancel_orders(to_cancel[: self.n_orders_per_execution + 1]) ) ) await asyncio.sleep( 0.01 ) # sleep 10 ms between sending cancellations and sending creations if to_create: results.append( await self.create_orders(to_create[: self.n_orders_per_execution]) ) if any(results): print() await asyncio.sleep( self.delay_between_executions ) # sleep before releasing lock return results finally: self.ts_released["cancel_and_create"] = time()
async def prepare_files(self): """ Takes downloaded data and prepares a numpy array for use in backtesting. @return: """ filenames = [ f for f in self.get_filenames() if int(f.split("_")[3].split(".")[0]) >= self.start_time and int(f.split("_")[2]) <= self.end_time ] left_overs = pd.DataFrame() sample_size_ms = 1000 current_index = 0 try: first_frame = pd.read_csv( os.path.join(self.filepath, filenames[0]), dtype={ "price": np.float64, "is_buyer_maker": np.float64, "timestamp": np.float64, "qty": np.float64, }, usecols=["price", "is_buyer_maker", "timestamp", "qty"], ) first_frame = first_frame[ (first_frame["timestamp"] >= self.start_time) & (first_frame["timestamp"] <= self.end_time)] earliest_time = first_frame.timestamp.iloc[ 0] // sample_size_ms * sample_size_ms except Exception as e: print_(["Error in determining earliest time", e]) earliest_time = self.start_time try: last_frame = pd.read_csv( os.path.join(self.filepath, filenames[-1]), dtype={ "price": np.float64, "is_buyer_maker": np.float64, "timestamp": np.float64, "qty": np.float64, }, usecols=["price", "is_buyer_maker", "timestamp", "qty"], ) last_frame = last_frame[ (last_frame["timestamp"] >= self.start_time) & (last_frame["timestamp"] <= self.end_time)] latest_time = last_frame.timestamp.iloc[ -1] // sample_size_ms * sample_size_ms except Exception as e: print_(["Error in determining latest time", e]) latest_time = self.end_time array = np.zeros( (int((latest_time - earliest_time) / sample_size_ms + 1), 3), dtype=np.float64, ) for f in filenames: chunk = pd.read_csv( os.path.join(self.filepath, f), dtype={ "price": np.float64, "is_buyer_maker": np.float64, "timestamp": np.float64, "qty": np.float64, }, usecols=["price", "is_buyer_maker", "timestamp", "qty"], ) chunk = pd.concat([left_overs, chunk]) chunk.sort_values("timestamp", inplace=True) chunk = chunk[(chunk["timestamp"] >= self.start_time) & (chunk["timestamp"] <= self.end_time)] cut_off = ( chunk.timestamp.iloc[-1] // sample_size_ms * 
sample_size_ms - 1 - (1 * sample_size_ms)) left_overs = chunk[chunk["timestamp"] > cut_off] chunk = chunk[chunk["timestamp"] <= cut_off] sampled_ticks = calc_samples(chunk[["timestamp", "qty", "price"]].values) if current_index != 0 and array[current_index - 1, 0] + 1000 != sampled_ticks[0, 0]: size = int( (sampled_ticks[0, 0] - array[current_index - 1, 0]) / sample_size_ms) - 1 tmp = np.zeros((size, 3), dtype=np.float64) tmp[:, 0] = np.arange( array[current_index - 1, 0] + sample_size_ms, sampled_ticks[0, 0], sample_size_ms, dtype=np.float64, ) tmp[:, 2] = array[current_index - 1, 2] array[current_index:current_index + len(tmp)] = tmp current_index += len(tmp) array[current_index:current_index + len(sampled_ticks)] = sampled_ticks current_index += len(sampled_ticks) print( "\rloaded chunk of data", f, ts_to_date(float(f.split("_")[2]) / 1000), end=" ", ) print("\n") # Fill in anything left over if not left_overs.empty: sampled_ticks = calc_samples( left_overs[["timestamp", "qty", "price"]].values) if current_index != 0 and array[current_index - 1, 0] + 1000 != sampled_ticks[0, 0]: size = int( (sampled_ticks[0, 0] - array[current_index - 1, 0]) / sample_size_ms) - 1 tmp = np.zeros((size, 3), dtype=np.float64) tmp[:, 0] = np.arange( array[current_index - 1, 0] + sample_size_ms, sampled_ticks[0, 0], sample_size_ms, dtype=np.float64, ) tmp[:, 2] = array[current_index - 1, 2] array[current_index:current_index + len(tmp)] = tmp current_index += len(tmp) array[current_index:current_index + len(sampled_ticks)] = sampled_ticks current_index += len(sampled_ticks) # Fill the gap at the end with the latest price # Should not be necessary anymore if current_index + 1 < len(array): size = len(array) - current_index tmp = np.zeros((size, 3), dtype=np.float64) tmp[:, 0] = np.arange( array[current_index - 1, 0] + sample_size_ms, array[current_index - 1, 0] + ((size + 1) * sample_size_ms), sample_size_ms, dtype=np.float64, ) tmp[:, 2] = array[current_index - 1, 2] 
array[current_index:current_index + len(tmp)] = tmp current_index += len(tmp) print_([ "Saving single file with", len(array), " ticks to", self.tick_filepath, "...", ]) np.save(self.tick_filepath, array) print_(["Saved single file!"])
async def download_ticks(self) -> None:
    """
    Searches for previously downloaded tick files and fills gaps in them if
    necessary, then downloads any data still missing for the configured
    [start_time, end_time] window.

    Flow:
      1. Create the exchange bot (binance spot/futures or bybit); bail out for
         unknown exchanges.
      2. Pass 1 over existing files: validate each candidate file, fill
         intra-file trade-id gaps by re-fetching, and save or remove fragments.
      3. Pass 2 over (possibly updated) filenames: compute inter-file chunk
         gaps as (start_time, end_time, start_id, end_id) tuples.
      4. For each chunk gap: optionally bulk-load from Binance monthly/daily
         zip archives, then poll the live ticks endpoint until the gap is
         closed, saving completed 100k-trade chunks along the way.

    @return: None. Side effects only (files written/removed via
             self.save_dataframe / os.remove, bot session opened and closed).
    """
    # --- 1. Instantiate the right exchange bot -------------------------------
    if self.config["exchange"] == "binance":
        if self.spot:
            self.bot = await create_binance_bot_spot(
                get_dummy_settings(self.config))
        else:
            self.bot = await create_binance_bot(
                get_dummy_settings(self.config))
    elif self.config["exchange"] == "bybit":
        self.bot = await create_bybit_bot(get_dummy_settings(self.config))
    else:
        print(self.config["exchange"], "not found")
        return

    # --- 2. Validate existing files and fill gaps inside them ----------------
    # Filenames encode "<first_id>_<last_id>_<first_ts>_<last_ts>[_verified].csv";
    # a 5th underscore-separated token marks an already-verified file.
    filenames = self.get_filenames()
    mod_files = []   # files (re)written in this pass; skipped as gap boundaries later
    highest_id = 0   # highest trade_id seen so far across processed files
    for f in filenames:
        verified = False
        try:
            first_time = int(f.split("_")[2])
            last_time = int(f.split("_")[3].split(".")[0])
            if len(f.split("_")) > 4:
                verified = True
        except:
            # NOTE(review): bare except — any unparsable filename is treated
            # as "unknown age" via sys.maxsize sentinels and re-validated below.
            first_time = sys.maxsize
            last_time = sys.maxsize
        # Re-validate unverified files overlapping the requested window, plus
        # any file whose timestamps could not be parsed (sentinel case).
        if (not verified and last_time >= self.start_time
                and (self.end_time == -1 or (first_time <= self.end_time))
                or last_time == sys.maxsize):
            print_(["Validating file", f, ts_to_date(first_time / 1000)])
            df = self.read_dataframe(os.path.join(self.filepath, f))
            missing, df, gaps = self.validate_dataframe(df)
            exists = False
            if gaps.empty:
                first_id = df["trade_id"].iloc[0]
                self.save_dataframe(df, f, missing, True)
            else:
                # Earliest id involved: either the file's first row or the
                # start of the first detected gap, whichever is lower.
                first_id = (df["trade_id"].iloc[0]
                            if df["trade_id"].iloc[0] < gaps["start"].iloc[0]
                            else gaps["start"].iloc[0])
            # Check whether another file already covers this 100k-id chunk
            # (chunks are aligned to trade_id // 100000).
            if not gaps.empty and (f != filenames[-1]
                                   or str(first_id - first_id % 100000) not in f):
                last_id = df["trade_id"].iloc[-1]
                for i in filenames:
                    tmp_first_id = int(i.split("_")[0])
                    tmp_last_id = int(i.split("_")[1].replace(".csv", ""))
                    if ((first_id - first_id % 100000) == tmp_first_id
                            and ((first_id - first_id % 100000 + 99999) == tmp_last_id
                                 or (highest_id == tmp_first_id
                                     or highest_id == tmp_last_id)
                                 or highest_id > last_id)
                            and first_id != 1 and i != f):
                        exists = True
                        break
            # Fill gaps by fetching from the live endpoint, but only while the
            # newest data is older than 10 s (utc_ms() - current_time > 10000),
            # i.e. we are not chasing the present.
            if (missing and df["timestamp"].iloc[-1] > self.start_time
                    and not exists):
                current_time = df["timestamp"].iloc[-1]
                for i in gaps.index:
                    print_([
                        "Filling gaps from id",
                        gaps["start"].iloc[i],
                        "to id",
                        gaps["end"].iloc[i],
                    ])
                    current_id = gaps["start"].iloc[i]
                    while (current_id < gaps["end"].iloc[i]
                           and utc_ms() - current_time > 10000):
                        loop_start = time()
                        try:
                            fetched_new_trades = await self.bot.fetch_ticks(
                                int(current_id))
                            tf = self.transform_ticks(fetched_new_trades)
                            if tf.empty:
                                print_(["Response empty. No new trades, exiting..."])
                                await asyncio.sleep(
                                    max(0.0,
                                        self.fetch_delay_seconds - time() + loop_start))
                                break
                            if current_id == tf["trade_id"].iloc[-1]:
                                # Endpoint returned nothing past current_id.
                                print_(["Same trade ID again. No new trades, exiting..."])
                                await asyncio.sleep(
                                    max(0.0,
                                        self.fetch_delay_seconds - time() + loop_start))
                                break
                            current_id = tf["trade_id"].iloc[-1]
                            df = pd.concat([df, tf])
                            df.sort_values("trade_id", inplace=True)
                            df.drop_duplicates("trade_id", inplace=True)
                            # Cap at the end of the gap's 100k-aligned chunk.
                            df = df[df["trade_id"] <= gaps["end"].iloc[i]
                                    - gaps["end"].iloc[i] % 100000 + 99999]
                            df.reset_index(drop=True, inplace=True)
                            current_time = df["timestamp"].iloc[-1]
                        except Exception:
                            print_(["Failed to fetch or transform..."])
                        # Rate limit: pad each iteration to fetch_delay_seconds.
                        await asyncio.sleep(
                            max(0.0, self.fetch_delay_seconds - time() + loop_start))
            if not df.empty:
                if df["trade_id"].iloc[-1] > highest_id:
                    highest_id = df["trade_id"].iloc[-1]
            if not exists:
                # Trim to the last complete 100k boundary before saving.
                tf = df[df["trade_id"].mod(100000) == 0]
                if len(tf) > 1:
                    df = df[:tf.index[-1]]
                nf = self.save_dataframe(df, f, missing, verified)
                mod_files.append(nf)
            elif df["trade_id"].iloc[0] != 1:
                # Chunk is covered by another file: drop this fragment.
                os.remove(os.path.join(self.filepath, f))
                print_(["Removed file fragment", f])

    # --- 3. Compute gaps BETWEEN files ---------------------------------------
    # Each gap is (prev_last_time, first_time, prev_last_id, first_id - 1);
    # trailing/leading 0 ids mean "unknown, resolve by timestamp".
    chunk_gaps = []
    filenames = self.get_filenames()
    prev_last_id = 0
    prev_last_time = self.start_time
    for f in filenames:
        first_id = int(f.split("_")[0])
        last_id = int(f.split("_")[1])
        first_time = int(f.split("_")[2])
        last_time = int(f.split("_")[3].split(".")[0])
        if (first_id - 1 != prev_last_id and f not in mod_files
                and first_time >= prev_last_time
                and first_time >= self.start_time
                and not prev_last_time > self.end_time):
            chunk_gaps.append(
                (prev_last_time, first_time, prev_last_id, first_id - 1))
        if first_time >= self.start_time or last_time >= self.start_time:
            prev_last_id = last_id
            prev_last_time = last_time
    if len(filenames) < 1:
        # Nothing downloaded yet: the whole window is one gap.
        chunk_gaps.append((self.start_time, self.end_time, 0, 0))
    if prev_last_time < self.end_time:
        # Gap from the newest file to the end of the requested window.
        chunk_gaps.append((prev_last_time, self.end_time, prev_last_id, 0))
    # De-duplicate while preserving order.
    seen = set()
    chunk_gaps_dedup = []
    for elm in chunk_gaps:
        if elm not in seen:
            chunk_gaps_dedup.append(elm)
            seen.add(elm)
    chunk_gaps = chunk_gaps_dedup

    # --- 4. Download each missing chunk --------------------------------------
    for gaps in chunk_gaps:
        start_time, end_time, start_id, end_id = gaps
        df = pd.DataFrame()
        current_id = start_id + 1
        current_time = start_time

        # Binance only: bulk-load from public monthly/daily zip archives
        # before falling back to the live endpoint.
        if self.config["exchange"] == "binance":
            # Fetch trade #1 to learn the earliest timestamp the exchange has.
            fetched_new_trades = await self.bot.fetch_ticks(1)
            tf = self.transform_ticks(fetched_new_trades)
            earliest = tf["timestamp"].iloc[0]
            if earliest > start_time:
                start_time = earliest
                current_time = start_time
            tmp = pd.date_range(
                start=datetime.datetime.fromtimestamp(
                    start_time / 1000, datetime.timezone.utc).date(),
                end=datetime.datetime.fromtimestamp(
                    end_time / 1000, datetime.timezone.utc).date(),
                freq="D",
            ).to_pydatetime()
            days = [date.strftime("%Y-%m-%d") for date in tmp]
            df = pd.DataFrame(columns=[
                "trade_id", "price", "qty", "timestamp", "is_buyer_maker"
            ])
            months_done = set()    # months fully covered by a monthly archive
            months_failed = set()  # months that must be fetched day by day
            for day in days:
                month = day[:7]
                if month in months_done:
                    continue
                if month in months_failed:
                    tf = self.get_zip(self.daily_base_url,
                                      self.config["symbol"], day)
                    if tf.empty:
                        print_(["failed to fetch daily", day])
                        continue
                else:
                    tf = self.get_zip(self.monthly_base_url,
                                      self.config["symbol"], month)
                    if tf.empty:
                        # Monthly archive missing: fall back to daily zips.
                        print_(["failed to fetch monthly", month])
                        months_failed.add(month)
                        tf = self.get_zip(self.daily_base_url,
                                          self.config["symbol"], day)
                    else:
                        months_done.add(month)
                # Clip archive data to the gap's time/id window.
                tf = tf[tf["timestamp"] >= start_time]
                tf = tf[tf["timestamp"] <= end_time]
                if start_id != 0:
                    tf = tf[tf["trade_id"] > start_id]
                if end_id != 0:
                    tf = tf[tf["trade_id"] <= end_id]
                if df.empty:
                    df = tf
                else:
                    df = pd.concat([df, tf])
                df.sort_values("trade_id", inplace=True)
                df.drop_duplicates("trade_id", inplace=True)
                df.reset_index(drop=True, inplace=True)
                # Flush completed 100k-aligned chunks to disk as they fill up.
                if not df.empty and (
                        (df["trade_id"].iloc[0] % 100000 == 0 and len(df) >= 100000)
                        or df["trade_id"].iloc[0] % 100000 != 0):
                    for index, row in df[df["trade_id"] % 100000 == 0].iterrows():
                        if index != 0:
                            self.save_dataframe(
                                df[(df["trade_id"] >= row["trade_id"] - 1000000)
                                   & (df["trade_id"] < row["trade_id"])],
                                "",
                                True,
                                False,
                            )
                            df = df[df["trade_id"] >= row["trade_id"]]
            if not df.empty:
                # Resume live fetching from where the archives left off.
                start_id = df["trade_id"].iloc[0] - 1
                start_time = df["timestamp"].iloc[0]
                current_time = df["timestamp"].iloc[-1]
                current_id = df["trade_id"].iloc[-1] + 1

        if start_id == 0:
            # Unknown start id: binary-search the exchange for start_time.
            df = await self.find_time(start_time)
            current_id = df["trade_id"].iloc[-1] + 1
            current_time = df["timestamp"].iloc[-1]
        end_id = sys.maxsize if end_id == 0 else end_id - 1

        # Live polling loop; same 10 s staleness guard as in pass 1.
        if (current_id <= end_id and current_time <= end_time
                and utc_ms() - current_time > 10000):
            print_([
                "Downloading from",
                ts_to_date(float(current_time) / 1000),
                "to",
                ts_to_date(float(end_time) / 1000),
            ])
        while (current_id <= end_id and current_time <= end_time
               and utc_ms() - current_time > 10000):
            loop_start = time()
            fetched_new_trades = await self.bot.fetch_ticks(int(current_id))
            tf = self.transform_ticks(fetched_new_trades)
            if tf.empty:
                print_(["Response empty. No new trades, exiting..."])
                await asyncio.sleep(
                    max(0.0, self.fetch_delay_seconds - time() + loop_start))
                break
            if current_id == tf["trade_id"].iloc[-1]:
                print_(["Same trade ID again. No new trades, exiting..."])
                await asyncio.sleep(
                    max(0.0, self.fetch_delay_seconds - time() + loop_start))
                break
            df = pd.concat([df, tf])
            df.sort_values("trade_id", inplace=True)
            df.drop_duplicates("trade_id", inplace=True)
            df.reset_index(drop=True, inplace=True)
            current_time = tf["timestamp"].iloc[-1]
            current_id = tf["trade_id"].iloc[-1] + 1
            # Flush a completed chunk once a 100k boundary has been crossed.
            tf = df[df["trade_id"].mod(100000) == 0]
            if not tf.empty and len(df) > 1:
                if df["trade_id"].iloc[0] % 100000 == 0 and len(tf) > 1:
                    self.save_dataframe(df[:tf.index[-1]], "", True, False)
                    df = df[tf.index[-1]:]
                elif df["trade_id"].iloc[0] % 100000 != 0 and len(tf) == 1:
                    self.save_dataframe(df[:tf.index[-1]], "", True, False)
                    df = df[tf.index[-1]:]
            # Rate limit: pad each iteration to fetch_delay_seconds.
            await asyncio.sleep(
                max(0.0, self.fetch_delay_seconds - time() + loop_start))

        # Save whatever partial remainder is left for this gap.
        if not df.empty:
            df = df[df["timestamp"] >= start_time]
            if start_id != 0 and not df.empty:
                df = df[df["trade_id"] > start_id]
            elif end_id != sys.maxsize and not df.empty:
                df = df[df["trade_id"] <= end_id]
            elif end_time != sys.maxsize and not df.empty:
                df = df[df["timestamp"] <= end_time]
            if not df.empty:
                self.save_dataframe(df, "", True, False)

    # Best-effort cleanup of the bot's HTTP session; bare except keeps
    # shutdown from raising if the session was never opened.
    try:
        await self.bot.session.close()
    except:
        pass