Example #1
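All four variants below share the same third-party dependencies. A minimal sketch of the imports they need, assuming the Ray 1.x-era module layout these snippets appear to target (newer Ray releases moved the suggest classes under ray.tune.search); the project-level helpers (create_config, clean_start_config, round_, make_get_filepath, ts_to_date, the LogReporter progress reporter, and the backtest wrappers) are defined elsewhere in the codebase and are not shown:

from time import time
from typing import Union
import logging
import os
import pprint

import nevergrad as ng
import numpy as np
import ray
from ray import tune
from ray.tune.schedulers import AsyncHyperBandScheduler
from ray.tune.suggest import ConcurrencyLimiter
from ray.tune.suggest.nevergrad import NevergradSearch

# Example #4 additionally uses: import shutil, sys; import psutil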
def backtest_tune(ticks: np.ndarray, backtest_config: dict, current_best: Union[dict, list] = None):
    config = create_config(backtest_config)
    n_days = round_((ticks[-1][2] - ticks[0][2]) / (1000 * 60 * 60 * 24), 0.1)
    session_dirpath = make_get_filepath(os.path.join('reports', backtest_config['exchange'], backtest_config['symbol'],
                                                     f"{n_days}_days_{ts_to_date(time())[:19].replace(':', '')}", ''))
    iters = 10
    if 'iters' in backtest_config:
        iters = backtest_config['iters']
    else:
        print('Parameter iters should be defined in the configuration. Defaulting to 10.')
    num_cpus = 2
    if 'num_cpus' in backtest_config:
        num_cpus = backtest_config['num_cpus']
    else:
        print('Parameter num_cpus should be defined in the configuration. Defaulting to 2.')
    n_particles = 10
    if 'n_particles' in backtest_config:
        n_particles = backtest_config['n_particles']
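    # PSO hyperparameters; the defaults below are the standard Clerc-Kennedy constriction coefficients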
    phi1 = 1.4962
    phi2 = 1.4962
    omega = 0.7298
    if 'options' in backtest_config:
        phi1 = backtest_config['options']['c1']
        phi2 = backtest_config['options']['c2']
        omega = backtest_config['options']['w']
    current_best_params = []
    if current_best:
        if isinstance(current_best, list):
            for c in current_best:
                c = clean_start_config(c, config, backtest_config['ranges'])
                current_best_params.append(c)
        else:
            current_best = clean_start_config(current_best, config, backtest_config['ranges'])
            current_best_params.append(current_best)

    ray.init(num_cpus=num_cpus, logging_level=logging.FATAL, log_to_driver=False)
    pso = ng.optimizers.ConfiguredPSO(transform='identity', popsize=n_particles, omega=omega, phip=phi1, phig=phi2)
    algo = NevergradSearch(optimizer=pso, points_to_evaluate=current_best_params)
    algo = ConcurrencyLimiter(algo, max_concurrent=num_cpus)
    scheduler = AsyncHyperBandScheduler()

    analysis = tune.run(tune.with_parameters(backtest, ticks=ticks), metric='objective', mode='max', name='search',
                        search_alg=algo, scheduler=scheduler, num_samples=iters, config=config, verbose=1,
                        reuse_actors=True, local_dir=session_dirpath,
                        progress_reporter=LogReporter(metric_columns=['daily_gain', 'closest_liquidation', 'objective'],
                                                      parameter_columns=[k for k in backtest_config['ranges']]))

    ray.shutdown()
    df = analysis.results_df
    df.reset_index(inplace=True)
    df.drop(columns=['trial_id', 'time_this_iter_s', 'done', 'timesteps_total', 'episodes_total', 'training_iteration',
                     'experiment_id', 'date', 'timestamp', 'time_total_s', 'pid', 'hostname', 'node_ip',
                     'time_since_restore', 'timesteps_since_restore', 'iterations_since_restore', 'experiment_tag'],
            inplace=True)
    df.to_csv(os.path.join(session_dirpath, 'results.csv'), index=False)  # the session directory created above
    print('Best candidate found:')
    pprint.pprint(analysis.best_config)
    plot_wrap(backtest_config, ticks, clean_result_config(analysis.best_config))
    return analysis
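A hedged usage sketch for this variant; the keys mirror what the function reads, while the exchange, symbol, and numeric values are illustrative rather than taken from the source:

backtest_config = {
    'exchange': 'binance',     # illustrative
    'symbol': 'BTCUSDT',       # illustrative
    'iters': 500,
    'num_cpus': 4,
    'n_particles': 20,
    'options': {'c1': 1.4962, 'c2': 1.4962, 'w': 0.7298},
    'ranges': {...},           # parameter name -> bounds, consumed by create_config
}
analysis = backtest_tune(ticks, backtest_config)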
Example #2
def backtest_tune(ticks: np.ndarray, backtest_config: dict, current_best: Union[dict, list] = None):
    config = create_config(backtest_config)
    n_days = round_((ticks[-1][2] - ticks[0][2]) / (1000 * 60 * 60 * 24), 0.1)
    session_dirpath = make_get_filepath(os.path.join('reports', backtest_config['exchange'], backtest_config['symbol'],
                                                     f"{n_days}_days_{ts_to_date(time())[:19].replace(':', '')}", ''))
    iters = 10
    if 'iters' in backtest_config:
        iters = backtest_config['iters']
    else:
        print('Parameter iters should be defined in the configuration. Defaulting to 10.')
    num_cpus = 2
    if 'num_cpus' in backtest_config:
        num_cpus = backtest_config['num_cpus']
    else:
        print('Parameter num_cpus should be defined in the configuration. Defaulting to 2.')
    n_particles = 10
    if 'n_particles' in backtest_config:
        n_particles = backtest_config['n_particles']
    phi1 = 1.4962
    phi2 = 1.4962
    omega = 0.7298
    if 'options' in backtest_config:
        phi1 = backtest_config['options']['c1']
        phi2 = backtest_config['options']['c2']
        omega = backtest_config['options']['w']
    current_best_params = []
    if current_best:
        if isinstance(current_best, list):
            for c in current_best:
                c = clean_start_config(c, config, backtest_config['ranges'])
                current_best_params.append(c)
        else:
            current_best = clean_start_config(current_best, config, backtest_config['ranges'])
            current_best_params.append(current_best)

    ray.init(num_cpus=num_cpus, logging_level=logging.FATAL, log_to_driver=False)
    pso = ng.optimizers.ConfiguredPSO(transform='identity', popsize=n_particles, omega=omega, phip=phi1, phig=phi2)
    algo = NevergradSearch(optimizer=pso, points_to_evaluate=current_best_params)
    algo = ConcurrencyLimiter(algo, max_concurrent=num_cpus)
    scheduler = AsyncHyperBandScheduler()

    analysis = tune.run(tune.with_parameters(wrap_backtest, ticks=ticks), metric='objective', mode='max', name='search',
                        search_alg=algo, scheduler=scheduler, num_samples=iters, config=config, verbose=1,
                        reuse_actors=True, local_dir=session_dirpath,
                        progress_reporter=LogReporter(metric_columns=['daily_gain',
                                                                      'closest_liquidation',
                                                                      'max_hours_between_fills',
                                                                      'objective'],
                                                      parameter_columns=[k for k in backtest_config['ranges']
                                                                         if isinstance(config[k], (ray.tune.sample.Float,
                                                                                                   ray.tune.sample.Integer))]))

    ray.shutdown()
    return analysis
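Unlike Example #1, this variant skips the CSV export and plotting and just returns the analysis object; the same dataframe is still reachable on the result if those steps are needed. A minimal sketch, reusing the backtest_config sketch from Example #1:

analysis = backtest_tune(ticks, backtest_config)
df = analysis.results_df.reset_index()
df.to_csv('results.csv', index=False)
pprint.pprint(analysis.best_config)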
Example #3
def backtest_tune(ticks: np.ndarray,
                  backtest_config: dict,
                  current_best: Union[dict, list] = None):
    config = create_config(backtest_config)
    n_days = round_((ticks[-1][2] - ticks[0][2]) / (1000 * 60 * 60 * 24), 0.1)
    backtest_config['optimize_dirpath'] = os.path.join(
        backtest_config['optimize_dirpath'],
        ts_to_date(time())[:19].replace(':', ''), '')
    if 'iters' in backtest_config:
        iters = backtest_config['iters']
    else:
        print(
            'Parameter iters should be defined in the configuration. Defaulting to 10.'
        )
        iters = 10
    if 'num_cpus' in backtest_config:
        num_cpus = backtest_config['num_cpus']
    else:
        print(
            'Parameter num_cpus should be defined in the configuration. Defaulting to 2.'
        )
        num_cpus = 2
    n_particles = backtest_config.get('n_particles', 10)
    phi1 = 1.4962
    phi2 = 1.4962
    omega = 0.7298
    if 'options' in backtest_config:
        phi1 = backtest_config['options']['c1']
        phi2 = backtest_config['options']['c2']
        omega = backtest_config['options']['w']
    current_best_params = []
    if current_best:
        if isinstance(current_best, list):
            for c in current_best:
                c = clean_start_config(c, config, backtest_config['ranges'])
                if c not in current_best_params:
                    current_best_params.append(c)
        else:
            current_best = clean_start_config(current_best, config,
                                              backtest_config['ranges'])
            current_best_params.append(current_best)

    ray.init(num_cpus=num_cpus,
             logging_level=logging.FATAL,
             log_to_driver=False)
    pso = ng.optimizers.ConfiguredPSO(transform='identity',
                                      popsize=n_particles,
                                      omega=omega,
                                      phip=phi1,
                                      phig=phi2)
    algo = NevergradSearch(optimizer=pso,
                           points_to_evaluate=current_best_params)
    algo = ConcurrencyLimiter(algo, max_concurrent=num_cpus)
    scheduler = AsyncHyperBandScheduler()

    if 'wfo' in config and config['wfo']:
        print('\n\nwalk forward optimization\n\n')
        wfo = WFO(ticks, backtest_config, P_train=0.5).set_train_N(4)
        backtest_wrap = lambda config: tune_report(wfo.backtest(config))
    else:
        print('\n\nsimple sliding window optimization\n\n')
        backtest_wrap = tune.with_parameters(simple_sliding_window_wrap,
                                             ticks=ticks)
    analysis = tune.run(backtest_wrap,
                        metric='objective',
                        mode='max',
                        name='search',
                        search_alg=algo,
                        scheduler=scheduler,
                        num_samples=iters,
                        config=config,
                        verbose=1,
                        reuse_actors=True,
                        local_dir=backtest_config['optimize_dirpath'],
                        progress_reporter=LogReporter(
                            metric_columns=[
                                'daily_gain', 'closest_liquidation',
                                'max_hrs_no_fills',
                                'max_hrs_no_fills_same_side', 'objective'
                            ],
                            parameter_columns=[
                                k for k in backtest_config['ranges']
                                if isinstance(config[k], (ray.tune.sample.Float,
                                                          ray.tune.sample.Integer))
                            ]),
                        raise_on_failed_trial=False)
    ray.shutdown()
    return analysis
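This variant branches on a wfo flag: when truthy, each trial is routed through the WFO walk-forward backtester (configured here with P_train=0.5 and set_train_N(4)); otherwise it falls back to the simple sliding-window wrapper. A hedged sketch of enabling it, assuming create_config carries the flag through to the tune config:

backtest_config['wfo'] = True   # route trials through walk-forward optimization
analysis = backtest_tune(ticks, backtest_config)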
Example #4
def backtest_tune(data: np.ndarray,
                  config: dict,
                  current_best: Union[dict, list] = None):
    # reserve ~20% headroom over the raw array size for Ray's object store
    memory = int(sys.getsizeof(data) * 1.2)
    virtual_memory = psutil.virtual_memory()
    print(f'data size in MB (incl. 20% headroom) {memory / (1000 * 1000):.4f}')
    if (virtual_memory.available - memory) / virtual_memory.total < 0.1:
        print(
            "Available memory would drop below 10%. Please reduce the time span."
        )
        return None
    config = create_config(config)
    print('tuning:')
    for k, v in config.items():
        if isinstance(v, (ray.tune.sample.Float, ray.tune.sample.Integer)):
            print(k, (v.lower, v.upper))
    phi1 = 1.4962
    phi2 = 1.4962
    omega = 0.7298
    if 'options' in config:
        phi1 = config['options']['c1']
        phi2 = config['options']['c2']
        omega = config['options']['w']
    current_best_params = []
    if current_best is not None:
        if isinstance(current_best, list):
            for c in current_best:
                c = clean_start_config(c, config)
                if c not in current_best_params:
                    current_best_params.append(c)
        else:
            current_best = clean_start_config(current_best, config)
            current_best_params.append(current_best)

    # let Ray's object store grow to fit the data when it exceeds 4 GB
    ray.init(num_cpus=config['num_cpus'],
             object_store_memory=memory if memory > 4000000000 else None)
    # optionally silence Ray: logging_level=logging.FATAL, log_to_driver=False
    pso = ng.optimizers.ConfiguredPSO(transform='identity',
                                      popsize=config['n_particles'],
                                      omega=omega,
                                      phip=phi1,
                                      phig=phi2)
    algo = NevergradSearch(optimizer=pso,
                           points_to_evaluate=current_best_params)
    algo = ConcurrencyLimiter(algo, max_concurrent=config['num_cpus'])
    scheduler = AsyncHyperBandScheduler()

    print('\n\nsimple sliding window optimization\n\n')

    parameter_columns = []
    for side in ['long', 'shrt']:
        if config[f'{side}£enabled']:
            parameter_columns.append(f'{side}£grid_span')
            parameter_columns.append(f'{side}£eprice_pprice_diff')
            parameter_columns.append(f'{side}£eprice_exp_base')
            parameter_columns.append(f'{side}£secondary_pprice_diff')
            parameter_columns.append(f'{side}£min_markup')

    backtest_wrap = tune.with_parameters(
        simple_sliding_window_wrap,
        data=data,
        do_print=config.get('print_slice_progress', False))
    analysis = tune.run(
        backtest_wrap,
        metric='obj',
        mode='max',
        name='search',
        search_alg=algo,
        scheduler=scheduler,
        num_samples=config['iters'],
        config=config,
        verbose=1,
        reuse_actors=True,
        local_dir=config['optimize_dirpath'],
        progress_reporter=LogReporter(
            metric_columns=[
                'min_adg', 'avg_adg', 'min_bkr', 'eqbal_ratio_min',
                'hrs_stuck_max_l', 'hrs_stuck_max_s', 'pac_mean_l', 'pac_mean_s',
                'n_slc', 'obj'
            ],
            parameter_columns=parameter_columns,
            max_report_frequency=30),
        raise_on_failed_trial=False)
    ray.shutdown()
    print('\nCleaning up temporary optimizer data...\n')
    try:
        shutil.rmtree(os.path.join(config['optimize_dirpath'], 'search'))
    except Exception as e:
        print('Failed cleaning up.')
        print(e)
    return analysis
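A hedged usage sketch for this variant; the keys are the ones the function actually reads (num_cpus, n_particles, iters, optimize_dirpath, and the per-side £-separated flags), with illustrative values:

config = {
    'num_cpus': 4,
    'n_particles': 20,
    'iters': 1000,
    'optimize_dirpath': 'optimize_results',
    'long£enabled': True,
    'shrt£enabled': False,
    # plus the per-side parameter ranges consumed by create_config
}
analysis = backtest_tune(data, config)
if analysis is not None:        # None means the memory guard tripped
    print(analysis.best_config)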