Example 1
    def __init__(self, config: dict):
        self.fetch_delay_seconds = 0.75
        self.config = config
        self.spot = 'spot' in config and config['spot']
        self.tick_filepath = os.path.join(
            config["caches_dirpath"],
            f"{config['session_name']}_ticks_cache.npy")
        try:
            self.start_time = int(
                parser.parse(self.config["start_date"]).replace(
                    tzinfo=datetime.timezone.utc).timestamp() * 1000)
        except Exception:
            raise Exception(
                f"Unrecognized date format for start time {config['start_date']}"
            )
        try:
            self.end_time = int(
                parser.parse(self.config["end_date"]).replace(
                    tzinfo=datetime.timezone.utc).timestamp() * 1000)
        except Exception:
            raise Exception(
                f"Unrecognized date format for end time {config['end_date']}")
        # range check is done outside the try block so that a future end date
        # is reported as such instead of being masked as a date format error
        if self.end_time > utc_ms():
            raise Exception(
                f"End date later than current time {config['end_date']}")
        if self.config['exchange'] == 'binance':
            if self.spot:
                self.daily_base_url = "https://data.binance.vision/data/spot/daily/aggTrades/"
                self.monthly_base_url = "https://data.binance.vision/data/spot/monthly/aggTrades/"
            else:
                market_type = 'cm' if config['inverse'] else 'um'
                self.daily_base_url = f"https://data.binance.vision/data/futures/{market_type}/daily/aggTrades/"
                self.monthly_base_url = f"https://data.binance.vision/data/futures/{market_type}/monthly/aggTrades/"
        elif self.config['exchange'] == 'bybit':
            self.daily_base_url = 'https://public.bybit.com/trading/'
        else:
            raise Exception(f"unknown exchange {config['exchange']}")
        if "historical_data_path" in self.config and self.config["historical_data_path"]:
            self.filepath = make_get_filepath(
                os.path.join(
                    self.config["historical_data_path"], "historical_data",
                    self.config["exchange"],
                    f"agg_trades_{'spot' if self.spot else 'futures'}",
                    self.config["symbol"], ""))
        else:
            self.filepath = make_get_filepath(
                os.path.join(
                    "historical_data", self.config["exchange"],
                    f"agg_trades_{'spot' if self.spot else 'futures'}",
                    self.config["symbol"], ""))
Example 2
def plot_wrap(config, data):
    print("n_days", round_(config["n_days"], 0.1))
    print("starting_balance", config["starting_balance"])
    print("backtesting...")
    sts = time()
    fills_long, fills_short, stats = backtest(config, data, do_print=True)
    print(f"{time() - sts:.2f} seconds elapsed")
    if not fills_long and not fills_short:
        print("no fills")
        return
    longs, shorts, sdf, result = analyze_fills(fills_long, fills_short, stats,
                                               config)
    config["result"] = result
    config["plots_dirpath"] = make_get_filepath(
        os.path.join(config["plots_dirpath"],
                     f"{ts_to_date(time())[:19].replace(':', '')}", ""))
    longs.to_csv(config["plots_dirpath"] + "fills_long.csv")
    shorts.to_csv(config["plots_dirpath"] + "fills_short.csv")
    sdf.to_csv(config["plots_dirpath"] + "stats.csv")
    df = pd.DataFrame({
        "timestamp": data[:, 0],
        "qty": data[:, 1],
        "price": data[:, 2]
    })
    print("dumping plots...")
    dump_plots(config, longs, shorts, sdf, df, n_parts=config["n_parts"])
Example 3
def backtest_single_wrap(config_: dict):
    config = config_.copy()
    exchange_name = config['exchange'] + ('_spot' if config['market_type'] == 'spot' else '')
    cache_filepath = f"backtests/{exchange_name}/{config['symbol']}/caches/"
    ticks_filepath = cache_filepath + f"{config['start_date']}_{config['end_date']}_ticks_cache.npy"
    with open(cache_filepath + 'market_specific_settings.json') as f:
        mss = json.load(f)
    ticks = np.load(ticks_filepath)
    config.update(mss)
    try:
        fills, stats = backtest(config, ticks)
        fdf, sdf, analysis = analyze_fills(fills, stats, config)
        pa_closeness_long = analysis['pa_closeness_mean_long']
        pa_closeness_shrt = analysis['pa_closeness_mean_shrt']
        adg = analysis['average_daily_gain']
        print(f"backtested {config['symbol']: <12} pa closeness long {pa_closeness_long:.6f} "
              f"pa closeness shrt {pa_closeness_shrt:.6f} adg {adg:.6f}")
    except Exception as e:
        print(f'error with {config["symbol"]} {e}')
        print('config')
        traceback.print_exc()
        adg = 0.0
        pa_closeness_long = pa_closeness_shrt = 100.0
        with open(make_get_filepath('tmp/harmony_search_errors.txt'), 'a') as f:
            f.write(json.dumps([time(), 'error', str(e), denumpyize(config)]) + '\n')
    return (pa_closeness_long, pa_closeness_shrt, adg)
Example 4
    def __init__(self, pool, base_config):
        self.pool = pool
        self.base_config = base_config
        self.xs_conf_map = sorted(base_config['ranges'])
        self.bounds = numpyize([[self.base_config['ranges'][k][0] for k in self.xs_conf_map],
                                [self.base_config['ranges'][k][1] for k in self.xs_conf_map]])
        self.now_date = ts_to_date(time())[:19].replace(':', '-')
        self.test_symbol = base_config['symbols'][0]
        self.results_fname = make_get_filepath(f'tmp/harmony_search_results_{self.test_symbol}_{self.now_date}.txt')
        self.best_conf_fname = f'tmp/harmony_search_best_config_{self.test_symbol}_{self.now_date}.json'
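As a sketch of the expected input, assuming base_config['ranges'] maps parameter names to [lower, upper] pairs (inferred from the indexing above; the parameter names here are hypothetical):

base_config = {'ranges': {'b_param': [0.0, 1.0], 'a_param': [5.0, 50.0]},
               'symbols': ['BTCUSDT']}
# xs_conf_map -> ['a_param', 'b_param']      (sorted key order)
# bounds      -> [[5.0, 0.0], [50.0, 1.0]]   (row of lower bounds, row of upper bounds)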
Example 5
    def __init__(self, config: dict):
        self.spot = False
        self.config = config
        self.config['do_long'] = config['long']['enabled']
        self.config['do_shrt'] = config['shrt']['enabled']
        self.config['max_leverage'] = 25
        self.xk = {}

        self.ws = None

        self.hedge_mode = self.config['hedge_mode'] = True
        self.set_config(self.config)

        self.ts_locked = {
            k: 0.0
            for k in [
                'cancel_orders', 'update_open_orders', 'cancel_and_create',
                'update_position', 'print', 'create_orders', 'check_fills',
                'update_fills', 'force_update'
            ]
        }
        self.ts_released = {k: 1.0 for k in self.ts_locked}
        self.heartbeat_ts = 0
        self.listen_key = None

        self.position = {}
        self.open_orders = []
        self.fills = []
        self.price = 0.0
        self.ob = [0.0, 0.0]

        self.n_orders_per_execution = 2
        self.delay_between_executions = 2
        self.force_update_interval = 30

        self.c_mult = self.config['c_mult'] = 1.0

        self.log_filepath = make_get_filepath(
            f"logs/{self.exchange}/{config['config_name']}.log")

        _, self.key, self.secret = load_exchange_key_secret(self.user)

        self.log_level = 0

        self.user_stream_task = None
        self.market_stream_task = None

        self.stop_websocket = False
        self.process_websocket_ticks = True
Example 6
async def main():
    parser = argparse.ArgumentParser(prog='Optimize', description='Optimize passivbot config.')
    parser = add_argparse_args(parser)
    parser.add_argument('-t', '--start', type=str, required=False, dest='starting_configs',
                        default=None,
                        help='start with given live configs.  single json file or dir with multiple json files')
    args = parser.parse_args()
    for config in await prep_config(args):
        shm = None
        try:
            template_live_config = get_template_live_config(config['n_spans'])
            config = {**template_live_config, **config}
            dl = Downloader(config)
            data = await dl.get_sampled_ticks()
            shm = shared_memory.SharedMemory(create=True, size=data.nbytes)
            shdata = np.ndarray(data.shape, dtype=data.dtype, buffer=shm.buf)
            shdata[:] = data
            del data
            config['n_days'] = (shdata[-1][0] - shdata[0][0]) / (1000 * 60 * 60 * 24)
            config['optimize_dirpath'] = make_get_filepath(os.path.join(config['optimize_dirpath'],
                                                                        ts_to_date(time())[:19].replace(':', ''), ''))

            print()
            for k in (keys := ['exchange', 'symbol', 'starting_balance', 'start_date', 'end_date', 'latency_simulation_ms',
                               'do_long', 'do_shrt', 'minimum_bankruptcy_distance', 'maximum_hrs_no_fills',
                               'maximum_hrs_no_fills_same_side', 'iters', 'n_particles', 'sliding_window_size',
                               'n_spans']):
                if k in config:
                    print(f"{k: <{max(map(len, keys)) + 2}} {config[k]}")
            print()

            backtest_wrap = BacktestWrap(shdata, config)
            post_processing = PostProcessing()
            if config['starting_configs']:
                starting_configs = get_starting_configs(config)
                initial_positions = [backtest_wrap.config_to_xs(cfg) for cfg in starting_configs]
            else:
                initial_positions = []
            pso_multiprocess(backtest_wrap.rf,
                             config['n_particles'],
                             backtest_wrap.bounds,
                             config['options']['c1'],
                             config['options']['c2'],
                             config['options']['w'],
                             n_cpus=config['num_cpus'],
                             iters=config['iters'],
                             initial_positions=initial_positions,
                             post_processing_func=post_processing.process)
        finally:
            # free the shared-memory block created in the try above;
            # the guard covers the case where creation itself failed
            if shm is not None:
                shm.close()
                shm.unlink()
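Assuming this main() belongs to optimize.py (the batch script in Example 9 invokes it by that name, and -t/--start is defined above; the directory path here is hypothetical), a run seeded from existing live configs might look like:

python3 optimize.py -t configs/live/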
Example 7
def load_hlc_cache(symbol,
                   start_date,
                   end_date,
                   base_dir="backtests",
                   spot=False):
    cache_fname = (
        f"{ts_to_date_utc(date_to_ts(start_date))[:10]}_" +
        f"{ts_to_date_utc(date_to_ts(end_date))[:10]}_ohlcv_cache.npy")

    filepath = make_get_filepath(
        os.path.join(base_dir, "binance" + ("_spot" if spot else ""), symbol,
                     "caches", cache_fname))
    if os.path.exists(filepath):
        return np.load(filepath)
    df = download_ohlcvs(symbol, start_date, end_date)
    df = df[df.timestamp >= date_to_ts(start_date)]
    df = df[df.timestamp <= date_to_ts(end_date)]
    data = df[["timestamp", "high", "low", "close"]].values
    np.save(filepath, data)
    return data
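A minimal usage sketch; per the column selection above, each row of the returned array is [timestamp, high, low, close]:

hlcs = load_hlc_cache("BTCUSDT", "2021-01-01", "2021-06-01")
timestamps, highs, lows, closes = hlcs[:, 0], hlcs[:, 1], hlcs[:, 2], hlcs[:, 3]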
Example 8
def plot_wrap(config, data):
    print('n_days', round_(config['n_days'], 0.1))
    print('starting_balance', config['starting_balance'])
    print('backtesting...')
    sts = time()
    fills, stats = backtest(config, data, do_print=True)
    print(f'{time() - sts:.2f} seconds elapsed')
    if not fills:
        print('no fills')
        return
    fdf, sdf, result = analyze_fills(fills, stats, config)
    config['result'] = result
    config['plots_dirpath'] = make_get_filepath(os.path.join(
        config['plots_dirpath'], f"{ts_to_date(time())[:19].replace(':', '')}", '')
    )
    fdf.to_csv(config['plots_dirpath'] + "fills.csv")
    sdf.to_csv(config['plots_dirpath'] + "stats.csv")
    df = pd.DataFrame({'timestamp': data[:, 0], 'qty': data[:, 1], 'price': data[:, 2]})
    print('dumping plots...')
    dump_plots(config, fdf, sdf, df)
Example 9
def main():
    tokens = [
        'BTS', 'LTC', 'STORJ', 'BAT', 'DASH', 'SOL', 'AVAX', 'LUNA', 'DYDX',
        'COMP', 'FIL', 'LINK', 'MATIC', 'LIT', 'NEO', 'OMG', 'XRP', 'HBAR',
        'MANA', 'IOTA', 'ADA', 'QTUM', 'SXP', 'XEM', 'EOS', 'XMR', 'ETC',
        'XLM', 'MKR', 'BNB', 'AAVE', 'ALGO', 'TRX', 'ZEC', 'XTZ', 'BCH'
    ]
    start_from = 'BTS'
    symbols = tokens[tokens.index(start_from):] + tokens[:tokens.index(start_from)]

    quote = 'USDT'
    cfgs_dir = make_get_filepath('cfgs_batch_optimize/')
    exchange = 'binance'

    symbols = [e + quote for e in symbols]
    kwargs_list = [
        {
            'start': cfgs_dir,
            'symbol': symbol,
            #'starting_balance': 10000.0,
            #'end_date': '2021-09-20T15:00',
            #'start_date': '2021-03-01',
        } for symbol in symbols
    ]
    for kwargs in kwargs_list:
        formatted = "python3 optimize.py"
        for key in kwargs:
            formatted += f" --{key} {kwargs[key]}"
        print(formatted)
        # pass the command as a single string when shell=True
        subprocess.run(formatted, shell=True)
        try:
            d = f'backtests/{exchange}/{kwargs["symbol"]}/plots/'
            ds = sorted([f for f in os.listdir(d) if '20' in f])
            for d1 in ds:
                print(f'copying resulting config to {cfgs_dir}', d + d1)
                shutil.copy(d + d1 + '/live_config.json',
                            f'{cfgs_dir}{kwargs["symbol"]}_{d1}.json')
        except Exception as e:
            print('error', kwargs['symbol'], e)
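For the first symbol, the loop above prints and runs a command of this shape (assuming make_get_filepath returns the path unchanged):

python3 optimize.py --start cfgs_batch_optimize/ --symbol BTSUSDT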
Example 10
async def main():
    logging.basicConfig(
        format="%(asctime)s %(levelname)-8s %(message)s",
        level=logging.INFO,
        datefmt="%Y-%m-%dT%H:%M:%S",
    )
    parser = argparse.ArgumentParser(
        prog="auto profit transfer",
        description="automatically transfer percentage of profits from futures wallet to spot wallet",
    )
    parser.add_argument("user",
                        type=str,
                        help="user/account_name defined in api-keys.json")
    parser.add_argument(
        "-p",
        "--percentage",
        type=float,
        required=False,
        default=0.5,
        dest="percentage",
        help="per uno, i.e. 0.02==2%.  default=0.5",
    )
    args = parser.parse_args()
    config = get_template_live_config()
    config["user"] = args.user
    config["symbol"] = "BTCUSDT"  # dummy symbol
    config["market_type"] = "futures"
    bot = await create_binance_bot(config)
    transfer_log_fpath = make_get_filepath(
        os.path.join("logs",
                     f"automatic_profit_transfer_log_{config['user']}.json"))
    try:
        already_transferred_ids = set(json.load(open(transfer_log_fpath)))
        logging.info(f"loaded already transferred IDs: {transfer_log_fpath}")
    except Exception:
        already_transferred_ids = set()
        logging.info("no previous transfers to load")
    while True:
        now = (await bot.public_get(bot.endpoints["time"]))["serverTime"]
        try:
            income = await bot.get_all_income(start_time=now -
                                              1000 * 60 * 60 * 24)
        except Exception as e:
            logging.error(f"failed fetching income {e}")
            traceback.print_exc()
            income = []
        income = [
            e for e in income
            if e["transaction_id"] not in already_transferred_ids
        ]
        profit = sum([e["income"] for e in income])
        to_transfer = round_dynamic(profit * args.percentage, 4)
        if to_transfer > 0:
            try:
                transferred = await bot.private_post(
                    bot.endpoints["futures_transfer"],
                    {
                        "asset": "USDT",
                        "amount": to_transfer,
                        "type": 2
                    },
                    base_endpoint=bot.spot_base_endpoint,
                )
                logging.info(
                    f"income: {profit} transferred {to_transfer} USDT")
                already_transferred_ids.update(
                    [e["transaction_id"] for e in income])
                json.dump(list(already_transferred_ids),
                          open(transfer_log_fpath, "w"))
            except Exception as e:
                logging.error(f"failed transferring {e}")
                traceback.print_exc()
        else:
            logging.info("nothing to transfer")
        sleep(60 * 60)
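Assuming the script is saved as auto_profit_transfer.py (the filename is hypothetical), transferring 10% of the past day's futures profit once per hour would be started with:

python3 auto_profit_transfer.py my_account -p 0.1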
Example 11
def backtest_wrap(config_: dict, ticks_caches: dict):
    """
    loads historical data from disk, runs backtest and returns relevant metrics
    """
    config = {
        "long": deepcopy(config_["long"]),
        "short": deepcopy(config_["short"]),
        **{
            k: config_[k]
            for k in [
                "starting_balance",
                "latency_simulation_ms",
                "symbol",
                "market_type",
                "config_no",
            ]
        },
        **config_["market_specific_settings"],
    }
    if config["symbol"] in ticks_caches:
        ticks = ticks_caches[config["symbol"]]
    else:
        ticks = np.load(config_["ticks_cache_fname"])
    try:
        fills_long, fills_short, stats = backtest(config, ticks)
        longs, shorts, sdf, analysis = analyze_fills(fills_long, fills_short,
                                                     stats, config)
        pa_distance_mean_long = analysis["pa_distance_mean_long"]
        pa_distance_mean_short = analysis["pa_distance_mean_short"]
        PAD_std_long = analysis["pa_distance_std_long"]
        PAD_std_short = analysis["pa_distance_std_short"]
        adg_long = analysis["adg_long"]
        adg_short = analysis["adg_short"]
        adg_DGstd_ratio_long = analysis["adg_DGstd_ratio_long"]
        adg_DGstd_ratio_short = analysis["adg_DGstd_ratio_short"]
        """
        with open("logs/debug_harmonysearch.txt", "a") as f:
            f.write(json.dumps({"config": denumpyize(config), "analysis": analysis}) + "\n")
        """
        logging.debug(
            f"backtested {config['symbol']: <12} pa distance long {pa_distance_mean_long:.6f} "
            f"pa distance short {pa_distance_mean_short:.6f} adg long {adg_long:.6f} "
            f"adg short {adg_short:.6f} std long {PAD_std_long:.5f} "
            f"std short {PAD_std_short:.5f}")
    except Exception as e:
        logging.error(f'error with {config["symbol"]} {e}')
        logging.error("config")
        traceback.print_exc()
        adg_long = adg_short = adg_DGstd_ratio_long = adg_DGstd_ratio_short = 0.0
        pa_distance_mean_long = pa_distance_mean_short = PAD_std_long = PAD_std_short = 100.0
        with open(make_get_filepath("tmp/harmony_search_errors.txt"),
                  "a") as f:
            f.write(
                json.dumps([time(), "error",
                            str(e),
                            denumpyize(config)]) + "\n")
    return {
        "pa_distance_mean_long": pa_distance_mean_long,
        "pa_distance_mean_short": pa_distance_mean_short,
        "adg_DGstd_ratio_long": adg_DGstd_ratio_long,
        "adg_DGstd_ratio_short": adg_DGstd_ratio_short,
        "pa_distance_std_long": PAD_std_long,
        "pa_distance_std_short": PAD_std_short,
        "adg_long": adg_long,
        "adg_short": adg_short,
    }
Example 12
    def __init__(self, config: dict):
        self.config = config
        self.do_long = config["long"]["enabled"]
        self.do_short = config["short"]["enabled"]
        self.n_harmonies = max(config["n_harmonies"],
                               len(config["starting_configs"]))
        self.starting_configs = config["starting_configs"]
        self.hm_considering_rate = config["hm_considering_rate"]
        self.bandwidth = config["bandwidth"]
        self.pitch_adjusting_rate = config["pitch_adjusting_rate"]
        self.iters = config["iters"]
        self.n_cpus = config["n_cpus"]
        self.pool = Pool(processes=config["n_cpus"])
        self.long_bounds = sort_dict_keys(
            config[f"bounds_{self.config['passivbot_mode']}"]["long"])
        self.short_bounds = sort_dict_keys(
            config[f"bounds_{self.config['passivbot_mode']}"]["short"])
        self.symbols = config["symbols"]
        self.identifying_name = (f"{len(self.symbols)}_symbols"
                                 if len(self.symbols) > 1 else self.symbols[0])
        self.now_date = ts_to_date(time())[:19].replace(":", "-")
        self.results_fpath = make_get_filepath(
            f"results_harmony_search_{self.config['passivbot_mode']}/{self.now_date}_{self.identifying_name}/"
        )
        self.exchange_name = config["exchange"] + (
            "_spot" if config["market_type"] == "spot" else "")
        self.market_specific_settings = {
            s: json.load(
                open(
                    f"backtests/{self.exchange_name}/{s}/caches/market_specific_settings.json"
                ))
            for s in self.symbols
        }
        self.date_range = f"{self.config['start_date']}_{self.config['end_date']}"
        self.bt_dir = f"backtests/{self.exchange_name}"
        self.ticks_cache_fname = (
            f"caches/{self.date_range}{'_ohlcv_cache.npy' if config['ohlcv'] else '_ticks_cache.npy'}"
        )
        """
        self.ticks_caches = (
            {s: np.load(f"{self.bt_dir}/{s}/{self.ticks_cache_fname}") for s in self.symbols}
            if self.n_harmonies > len(self.symbols)
            else {}
        )
        """
        self.ticks_caches = {}
        self.shms = {}  # shared memories
        self.current_best_config = None

        # [{'config': dict, 'task': process, 'id_key': tuple}]
        self.workers = [None for _ in range(self.n_cpus)]

        # hm = {hm_key: str: {'long': {'score': float, 'config': dict}, 'short': {...}}}
        self.hm = {}

        # {identifier: {'config': dict,
        #               'single_results': {symbol_finished: single_backtest_result},
        #               'in_progress': set({symbol_in_progress}))}
        self.unfinished_evals = {}

        self.iter_counter = 0
Example 13
def download_ohlcvs(symbol,
                    start_date,
                    end_date,
                    download_only=False) -> pd.DataFrame:
    dirpath = make_get_filepath(f"historical_data/ohlcvs_futures/{symbol}/")
    base_url = f"https://data.binance.vision/data/futures/um/"
    col_names = ["timestamp", "open", "high", "low", "close", "volume"]
    start_ts = date_to_ts(start_date)
    end_ts = date_to_ts(end_date)
    days = [
        ts_to_date_utc(x)[:10]
        for x in range(start_ts, end_ts, 1000 * 60 * 60 * 24)
    ]
    months = sorted(set([x[:7] for x in days]))
    months_done = set()
    dfs = []
    for month in months:
        month_filepath = dirpath + month + ".csv"
        if os.path.exists(month_filepath):
            months_done.add(month)
            if not download_only:
                dfs.append(pd.read_csv(month_filepath))
            continue
        try:
            url = base_url + f"monthly/klines/{symbol}/1m/{symbol}-1m-{month}.zip"
            print("fetching", url)
            csv = get_zip(url)
            csv.to_csv(month_filepath)
            months_done.add(month)
            if not download_only:
                dfs.append(csv)
            for f in os.listdir(dirpath):
                if month in f and len(f) > 11:
                    print("deleting", dirpath + f)
                    os.remove(dirpath + f)
        except Exception as e:
            if month != months[-1]:
                months_done.add(month)
            print(e)
    for day in days:
        if day[:7] in months_done:
            continue
        day_filepath = dirpath + day + ".csv"
        if os.path.exists(day_filepath):
            if not download_only:
                dfs.append(pd.read_csv(day_filepath))
            continue
        try:
            print("fetching", day_filepath)
            csv = get_zip(base_url +
                          f"daily/klines/{symbol}/1m/{symbol}-1m-{day}.zip")
            csv.to_csv(day_filepath)
            if not download_only:
                dfs.append(csv)
        except Exception as e:
            print(e)
            break
    if not download_only:
        df = pd.concat(dfs)[col_names].sort_values("timestamp")
        df = df.drop_duplicates(subset=["timestamp"]).reset_index()
        nindex = np.arange(df.timestamp.iloc[0], df.timestamp.iloc[-1] + 60000,
                           60000)
        return (df[col_names].set_index("timestamp").reindex(nindex)
                .ffill().reset_index())
Example 14
         "PAD_std_mean_raw": PAD_std_mean_raw,
         "PAD_mean_mean": PAD_mean_mean,
         "PAD_mean_mean_raw": PAD_mean_mean_raw,
         "score": score,
         "adg_DGstd_ratios_mean": adg_DGstd_ratios_mean,
         "adg_DGstd_ratios_std": adg_DGstd_ratios_std,
         "config_no": r["results"]["config_no"],
     })
 ss = sorted(stats, key=lambda x: x["score"])
 bc = ss[-args.index]
 live_config = candidate_to_live_config(bc["config"])
 if args.dump_live_config:
     print("dump_live_config")
     dump_live_config(
         live_config,
         make_get_filepath(
             args.results_fpath.replace('.txt', '_config.json')))
 print(config_pretty_str(live_config))
 pprint.pprint({k: v for k, v in bc.items() if k != "config"})
 for r in results:
     if r["results"]["config_no"] == bc["config_no"]:
         rs = r["results"]
         syms = [s for s in rs if "config" not in s]
         print("symbol               adg      PADmean  PADstd   adg/DGstd")
         for s in sorted(syms, key=lambda x: rs[x][f"adg_{side}"]):
             print(
                 f"{s: <20} {rs[s][f'adg_{side}'] / bc['config'][side]['wallet_exposure_limit']:.6f} "
                 f"{rs[s][f'pa_distance_mean_{side}']:.6f} {rs[s][f'pa_distance_std_{side}']:.6f} "
                 f"{rs[s][f'adg_DGstd_ratio_{side}']:.6f} ")
         print(
             f"{'means': <20} {bc['adg_mean'] / bc['config'][side]['wallet_exposure_limit']:.6f} "
Example 15
    def __init__(self, config: dict):
        self.spot = False
        self.config = config
        self.config["do_long"] = config["long"]["enabled"]
        self.config["do_short"] = config["short"]["enabled"]
        self.config["max_leverage"] = 25
        self.xk = {}

        self.ws = None

        self.hedge_mode = self.config["hedge_mode"] = True
        self.set_config(self.config)

        self.ts_locked = {
            k: 0.0
            for k in [
                "cancel_orders",
                "update_open_orders",
                "cancel_and_create",
                "update_position",
                "print",
                "create_orders",
                "check_fills",
                "update_fills",
                "force_update",
            ]
        }
        self.ts_released = {k: 1.0 for k in self.ts_locked}
        self.error_halt = {
            "update_open_orders": False,
            "update_fills": False,
            "update_position": False,
        }
        self.heartbeat_ts = 0
        self.listen_key = None

        self.position = {}
        self.open_orders = []
        self.fills = []
        self.price = 0.0
        self.ob = [0.0, 0.0]
        self.emas_long = np.zeros(3)
        self.emas_short = np.zeros(3)
        self.ema_sec = 0

        self.n_orders_per_execution = 2
        self.delay_between_executions = 3
        self.force_update_interval = 30

        self.c_mult = self.config["c_mult"] = 1.0

        self.log_filepath = make_get_filepath(
            f"logs/{self.exchange}/{config['config_name']}.log")

        _, self.key, self.secret = load_exchange_key_secret(self.user)

        self.log_level = 0

        self.user_stream_task = None
        self.market_stream_task = None

        self.stop_websocket = False
        self.process_websocket_ticks = True