async def prepare_backtest_config(args) -> dict:
    """
    takes argparse args, returns dict with backtest and optimize config
    """
    config = load_hjson_config(args.backtest_config_path)
    for key in [
        "symbol",
        "user",
        "start_date",
        "end_date",
        "starting_balance",
        "market_type",
        "base_dir",
        "ohlcv",
    ]:
        if hasattr(args, key) and getattr(args, key) is not None:
            config[key] = getattr(args, key)
        elif key not in config:
            config[key] = None
    if args.market_type is None:
        config["spot"] = False
    else:
        config["spot"] = args.market_type == "spot"
    config["start_date"] = ts_to_date_utc(date_to_ts(config["start_date"]))[:10]
    config["end_date"] = ts_to_date_utc(date_to_ts(config["end_date"]))[:10]
    config["exchange"], _, _ = load_exchange_key_secret(config["user"])
    config["session_name"] = (
        f"{config['start_date'].replace(' ', '').replace(':', '').replace('.', '')}_"
        f"{config['end_date'].replace(' ', '').replace(':', '').replace('.', '')}"
    )
    if config["base_dir"].startswith("~"):
        raise Exception(
            "error: using the ~ to indicate the user's home directory is not supported"
        )
    base_dirpath = os.path.join(
        config["base_dir"],
        f"{config['exchange']}{'_spot' if 'spot' in config['market_type'] else ''}",
        config["symbol"],
    )
    config["caches_dirpath"] = make_get_filepath(os.path.join(base_dirpath, "caches", ""))
    config["optimize_dirpath"] = make_get_filepath(os.path.join(base_dirpath, "optimize", ""))
    config["plots_dirpath"] = make_get_filepath(os.path.join(base_dirpath, "plots", ""))
    await add_market_specific_settings(config)
    return config

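# Usage sketch for prepare_backtest_config, not part of the original module:
# builds an argparse-style namespace carrying exactly the attributes the
# function reads. The config path, user key name, and dates are hypothetical.
async def _example_prepare_backtest_config():
    import argparse

    args = argparse.Namespace(
        backtest_config_path="configs/backtest/default.hjson",  # hypothetical path
        symbol="BTCUSDT",
        user="binance_01",  # hypothetical api-keys entry
        start_date="2021-01-01",
        end_date="2021-06-01",
        starting_balance=1000.0,
        market_type="futures",
        base_dir="backtests",
        ohlcv=True,
    )
    config = await prepare_backtest_config(args)
    return config
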
async def fetch_ticks(self, from_id: int = None, do_print: bool = True):
    params = {"symbol": self.symbol, "limit": 1000}
    if from_id is not None:
        params["from"] = max(0, from_id)
    try:
        ticks = await self.public_get(self.endpoints["ticks"], params)
    except Exception as e:
        print("error fetching ticks", e)
        return []
    try:
        trades = [
            {
                "trade_id": int(tick["id"]),
                "price": float(tick["price"]),
                "qty": float(tick["qty"]),
                "timestamp": date_to_ts(tick["time"]),
                "is_buyer_maker": tick["side"] == "Sell",
            }
            for tick in ticks["result"]
        ]
        if do_print:
            print_(
                [
                    "fetched trades",
                    self.symbol,
                    trades[0]["trade_id"],
                    ts_to_date(float(trades[0]["timestamp"]) / 1000),
                ]
            )
    except Exception:
        trades = []
        if do_print:
            print_(["fetched no new trades", self.symbol])
    return trades

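# Pagination sketch for fetch_ticks, not part of the original module: walks
# forward through the trade history 1000 ticks per request, resuming from the
# last fetched trade_id (resumption semantics are an assumption here).
# Assumes `bot` is a connected instance of this class.
async def _example_walk_ticks(bot, n_batches: int = 3):
    all_ticks = []
    from_id = None
    for _ in range(n_batches):
        batch = await bot.fetch_ticks(from_id=from_id, do_print=False)
        if not batch:
            break
        all_ticks += batch
        from_id = batch[-1]["trade_id"] + 1  # continue after the newest tick seen
    return all_ticks
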
def make_tick_samples(config: dict, sec_span: int = 1):
    """
    makes tick samples from agg_trades
    tick samples are [(timestamp, qty, price)]
    config must include parameters
    - exchange: str
    - symbol: str
    - spot: bool
    - start_date: str
    - end_date: str
    """
    for key in ["exchange", "symbol", "spot", "start_date", "end_date"]:
        assert key in config
    start_ts = date_to_ts(config["start_date"])
    end_ts = date_to_ts(config["end_date"])
    ticks_filepath = os.path.join(
        "historical_data",
        config["exchange"],
        f"agg_trades_{'spot' if config['spot'] else 'futures'}",
        config["symbol"],
        "",
    )
    if not os.path.exists(ticks_filepath):
        return
    ticks_filenames = sorted([f for f in os.listdir(ticks_filepath) if f.endswith(".csv")])
    ticks = np.empty((0, 3))
    sts = time()
    for f in ticks_filenames:
        _, _, first_ts, last_ts = map(int, f.replace(".csv", "").split("_"))
        if first_ts > end_ts or last_ts < start_ts:
            continue
        print(f"\rloading chunk {ts_to_date(first_ts / 1000)}", end=" ")
        tdf = pd.read_csv(os.path.join(ticks_filepath, f))
        tdf = tdf[(tdf.timestamp >= start_ts) & (tdf.timestamp <= end_ts)]
        ticks = np.concatenate((ticks, tdf[["timestamp", "qty", "price"]].values))
        del tdf
    samples = calc_samples(ticks[ticks[:, 0].argsort()], sec_span * 1000)
    print(
        f"took {time() - sts:.2f} seconds to load {len(ticks)} ticks, creating {len(samples)} samples"
    )
    del ticks
    return samples

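# Usage sketch for make_tick_samples, not part of the original module: assumes
# agg_trades csvs were already downloaded under historical_data/; all config
# values are hypothetical.
def _example_make_tick_samples():
    config = {
        "exchange": "binance",
        "symbol": "BTCUSDT",
        "spot": False,
        "start_date": "2021-01-01",
        "end_date": "2021-02-01",
    }
    # returns an array of (timestamp, qty, price) samples,
    # or None if no cached tick data exists for the symbol
    return make_tick_samples(config, sec_span=1)
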
def load_hlc_cache(symbol, start_date, end_date, base_dir="backtests", spot=False):
    cache_fname = (
        f"{ts_to_date_utc(date_to_ts(start_date))[:10]}_"
        + f"{ts_to_date_utc(date_to_ts(end_date))[:10]}_ohlcv_cache.npy"
    )
    filepath = make_get_filepath(
        os.path.join(
            base_dir, "binance" + ("_spot" if spot else ""), symbol, "caches", cache_fname
        )
    )
    if os.path.exists(filepath):
        return np.load(filepath)
    df = download_ohlcvs(symbol, start_date, end_date)
    df = df[df.timestamp >= date_to_ts(start_date)]
    df = df[df.timestamp <= date_to_ts(end_date)]
    data = df[["timestamp", "high", "low", "close"]].values
    np.save(filepath, data)
    return data

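# Usage sketch for load_hlc_cache, not part of the original module: the first
# call downloads 1m ohlcvs and writes the .npy cache; later calls with the same
# date range load straight from disk. Symbol and dates are hypothetical.
def _example_load_hlc_cache():
    data = load_hlc_cache("BTCUSDT", "2021-01-01", "2021-06-01", base_dir="backtests", spot=False)
    # columns: timestamp (ms), high, low, close
    return data
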
async def fetch_open_orders(self) -> List[dict]:
    fetched = await self.private_get(self.endpoints["open_orders"], {"symbol": self.symbol})
    return [
        {
            "order_id": elm["order_id"],
            "custom_id": elm["order_link_id"],
            "symbol": elm["symbol"],
            "price": float(elm["price"]),
            "qty": float(elm["qty"]),
            "side": elm["side"].lower(),
            "position_side": determine_pos_side(elm),
            "timestamp": date_to_ts(elm[self.endpoints["created_at_key"]]),
        }
        for elm in fetched["result"]
    ]

def standardize_user_stream_event(
    self, event: Union[List[Dict], Dict]
) -> Union[List[Dict], Dict]:
    events = []
    if "topic" in event:
        if event["topic"] == "order":
            for elm in event["data"]:
                if elm["symbol"] == self.symbol:
                    if elm["order_status"] == "Created":
                        pass
                    elif elm["order_status"] == "Rejected":
                        pass
                    elif elm["order_status"] == "New":
                        new_open_order = {
                            "order_id": elm["order_id"],
                            "symbol": elm["symbol"],
                            "price": float(elm["price"]),
                            "qty": float(elm["qty"]),
                            "type": elm["order_type"].lower(),
                            "side": elm["side"].lower(),
                            "timestamp": date_to_ts(
                                elm["timestamp" if self.inverse else "update_time"]
                            ),
                        }
                        if "inverse_perpetual" in self.market_type:
                            if self.position["long"]["size"] == 0.0:
                                if self.position["shrt"]["size"] == 0.0:
                                    new_open_order["position_side"] = (
                                        "long" if new_open_order["side"] == "buy" else "shrt"
                                    )
                                else:
                                    new_open_order["position_side"] = "shrt"
                            else:
                                new_open_order["position_side"] = "long"
                        elif "inverse_futures" in self.market_type:
                            new_open_order["position_side"] = determine_pos_side(elm)
                        else:
                            new_open_order["position_side"] = (
                                "long"
                                if (
                                    (
                                        new_open_order["side"] == "buy"
                                        and elm["create_type"] == "CreateByUser"
                                    )
                                    or (
                                        new_open_order["side"] == "sell"
                                        and elm["create_type"] == "CreateByClosing"
                                    )
                                )
                                else "shrt"
                            )
                        events.append({"new_open_order": new_open_order})
                    elif elm["order_status"] == "PartiallyFilled":
                        events.append(
                            {"deleted_order_id": elm["order_id"], "partially_filled": True}
                        )
                    elif elm["order_status"] == "Filled":
                        events.append({"deleted_order_id": elm["order_id"], "filled": True})
                    elif elm["order_status"] == "Cancelled":
                        events.append({"deleted_order_id": elm["order_id"]})
                    elif elm["order_status"] == "PendingCancel":
                        pass
                else:
                    events.append(
                        {"other_symbol": elm["symbol"], "other_type": event["topic"]}
                    )
        elif event["topic"] == "execution":
            for elm in event["data"]:
                if elm["symbol"] == self.symbol:
                    if elm["exec_type"] == "Trade":
                        # already handled by "order" topic
                        pass
                else:
                    events.append(
                        {"other_symbol": elm["symbol"], "other_type": event["topic"]}
                    )
        elif event["topic"] == "position":
            for elm in event["data"]:
                if elm["symbol"] == self.symbol:
                    standardized = {}
                    if elm["side"] == "Buy":
                        standardized["long_psize"] = round_(float(elm["size"]), self.qty_step)
                        standardized["long_pprice"] = float(elm["entry_price"])
                    elif elm["side"] == "Sell":
                        standardized["shrt_psize"] = -round_(
                            abs(float(elm["size"])), self.qty_step
                        )
                        standardized["shrt_pprice"] = float(elm["entry_price"])
                    events.append(standardized)
                    if self.inverse:
                        events.append({"wallet_balance": float(elm["wallet_balance"])})
                else:
                    events.append(
                        {"other_symbol": elm["symbol"], "other_type": event["topic"]}
                    )
        elif not self.inverse and event["topic"] == "wallet":
            for elm in event["data"]:
                events.append({"wallet_balance": float(elm["wallet_balance"])})
    return events

def download_ohlcvs(symbol, start_date, end_date, download_only=False) -> pd.DataFrame:
    dirpath = make_get_filepath(f"historical_data/ohlcvs_futures/{symbol}/")
    base_url = "https://data.binance.vision/data/futures/um/"
    col_names = ["timestamp", "open", "high", "low", "close", "volume"]
    start_ts = date_to_ts(start_date)
    end_ts = date_to_ts(end_date)
    days = [ts_to_date_utc(x)[:10] for x in list(range(start_ts, end_ts, 1000 * 60 * 60 * 24))]
    months = sorted(set([x[:7] for x in days]))
    months_done = set()
    dfs = []
    # prefer monthly archives; daily archives fill in months not yet archived
    for month in months:
        month_filepath = dirpath + month + ".csv"
        if os.path.exists(month_filepath):
            months_done.add(month)
            if not download_only:
                dfs.append(pd.read_csv(month_filepath))
            continue
        try:
            url = base_url + f"monthly/klines/{symbol}/1m/{symbol}-1m-{month}.zip"
            print("fetching", url)
            csv = get_zip(url)
            csv.to_csv(month_filepath)
            months_done.add(month)
            if not download_only:
                dfs.append(csv)
            # remove daily files now covered by the monthly archive
            for f in os.listdir(dirpath):
                if month in f and len(f) > 11:
                    print("deleting", dirpath + f)
                    os.remove(dirpath + f)
        except Exception as e:
            if month != months[-1]:
                # mark missing past months as done so daily fetching is skipped;
                # only the most recent month falls through to daily archives
                months_done.add(month)
            print(e)
    for day in days:
        if day[:7] in months_done:
            continue
        day_filepath = dirpath + day + ".csv"
        if os.path.exists(day_filepath):
            if not download_only:
                dfs.append(pd.read_csv(day_filepath))
            continue
        try:
            print("fetching", day_filepath)
            csv = get_zip(base_url + f"daily/klines/{symbol}/1m/{symbol}-1m-{day}.zip")
            csv.to_csv(day_filepath)
            if not download_only:
                dfs.append(csv)
        except Exception as e:
            print(e)
            break
    if not download_only:
        df = pd.concat(dfs)[col_names].sort_values("timestamp")
        df = df.drop_duplicates(subset=["timestamp"]).reset_index()
        # reindex onto a gapless 1m grid, forward-filling missing minutes
        nindex = np.arange(df.timestamp.iloc[0], df.timestamp.iloc[-1] + 60000, 60000)
        return (
            df[col_names]
            .set_index("timestamp")
            .reindex(nindex)
            .fillna(method="ffill")
            .reset_index()
        )

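# Usage sketch for download_ohlcvs, not part of the original module: monthly
# archives are fetched where available and daily archives fill the most recent,
# not-yet-archived month. Symbol and dates are hypothetical.
def _example_download_ohlcvs():
    df = download_ohlcvs("BTCUSDT", "2021-01-01", "2021-03-01", download_only=False)
    # df: gapless 1m grid with columns timestamp/open/high/low/close/volume
    return df
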