def simple_backtest(config, contour, num_results):
    data = load_data_test(contour)
    processed = optimize.preprocess(data)
    assert isinstance(processed, dict)
    results = backtest(config['stake_amount'], processed, 1, True)
    # results :: <class 'pandas.core.frame.DataFrame'>
    assert len(results) == num_results

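# Hypothetical usage sketch (not from the source): one way a test might drive the
# simple_backtest helper above. The 'raise' contour is borrowed from test_processed
# further down; the expected trade count passed as num_results is an illustrative
# placeholder, not a verified value.
def test_simple_backtest_example(default_conf, mocker):
    mocker.patch.dict('freqtrade.main._CONF', default_conf)
    simple_backtest(default_conf, 'raise', num_results=1)
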
def test_backtest(default_conf, mocker):
    mocker.patch.dict('freqtrade.main._CONF', default_conf)
    exchange._API = Bittrex({'key': '', 'secret': ''})
    data = optimize.load_data(None, ticker_interval=5, pairs=['BTC_ETH'])
    results = backtest(default_conf['stake_amount'], optimize.preprocess(data), 10, True)
    assert not results.empty

def test_backtest_1min_ticker_interval(default_conf, mocker):
    mocker.patch.dict('freqtrade.main._CONF', default_conf)
    exchange._API = Bittrex({'key': '', 'secret': ''})

    # Run a backtest over the existing 1 min ticker_interval data
    data = optimize.load_data(None, ticker_interval=1, pairs=['BTC_UNITEST'])
    results = backtest(default_conf['stake_amount'], optimize.preprocess(data), 1, True)
    assert not results.empty

def start(args):
    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    )

    exchange._API = Bittrex({'key': '', 'secret': ''})

    logger.info('Using config: %s ...', args.config)
    config = misc.load_config(args.config)

    logger.info('Using ticker_interval: %s ...', args.ticker_interval)

    data = {}
    pairs = config['exchange']['pair_whitelist']
    if args.live:
        logger.info('Downloading data for all pairs in whitelist ...')
        for pair in pairs:
            data[pair] = exchange.get_ticker_history(pair, args.ticker_interval)
    else:
        logger.info('Using local backtesting data (using whitelist in given config) ...')
        data = optimize.load_data(args.datadir, pairs=pairs,
                                  ticker_interval=args.ticker_interval,
                                  refresh_pairs=args.refresh_pairs)

    logger.info('Using stake_currency: %s ...', config['stake_currency'])
    logger.info('Using stake_amount: %s ...', config['stake_amount'])

    max_open_trades = 0
    if args.realistic_simulation:
        logger.info('Using max_open_trades: %s ...', config['max_open_trades'])
        max_open_trades = config['max_open_trades']

    # Monkey patch config
    from freqtrade import main
    main._CONF = config

    preprocessed = preprocess(data)
    # Print timeframe
    min_date, max_date = get_timeframe(preprocessed)
    logger.info('Measuring data from %s up to %s ...',
                min_date.isoformat(), max_date.isoformat())

    # Execute backtest and print results
    results = backtest(
        stake_amount=config['stake_amount'],
        processed=preprocessed,
        max_open_trades=max_open_trades,
        realistic=args.realistic_simulation,
        sell_profit_only=config.get('experimental', {}).get('sell_profit_only', False),
        stoploss=config.get('stoploss'),
        use_sell_signal=config.get('experimental', {}).get('use_sell_signal', False)
    )
    logger.info(
        '\n==================================== BACKTESTING REPORT ====================================\n%s',  # noqa
        generate_text_table(data, results, config['stake_currency'], args.ticker_interval)
    )

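# Illustrative sketch (not from the source): the argparse-style namespace that
# the backtesting start() above consumes. The attribute names mirror exactly what
# the function reads; the values shown are hypothetical examples.
import argparse
import logging

example_backtest_args = argparse.Namespace(
    loglevel=logging.INFO,        # level handed to logging.basicConfig
    config='config.json',         # path passed to misc.load_config
    ticker_interval=5,            # candle size in minutes
    live=False,                   # True downloads ticker history instead of using local data
    datadir=None,                 # directory searched for local backtesting data
    refresh_pairs=False,          # refresh local pair data before backtesting
    realistic_simulation=True,    # honour max_open_trades from the config
)
# start(example_backtest_args)
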
def test_processed(default_conf, mocker):
    mocker.patch.dict('freqtrade.main._CONF', default_conf)

    dict_of_tickerrows = load_data_test('raise')
    dataframes = optimize.preprocess(dict_of_tickerrows)
    dataframe = dataframes['BTC_UNITEST']
    cols = dataframe.columns
    # assert the dataframe got some of the indicator columns
    for col in ['close', 'high', 'low', 'open', 'date',
                'ema50', 'ao', 'macd', 'plus_dm']:
        assert col in cols

def start(args):
    global TOTAL_TRIES, PROCESSED, SPACE, TRIALS, _CURRENT_TRIES

    TOTAL_TRIES = args.epochs

    exchange._API = Bittrex({'key': '', 'secret': ''})

    # Initialize logger
    logging.basicConfig(
        level=args.loglevel,
        format='\n%(message)s',
    )

    logger.info('Using config: %s ...', args.config)
    config = load_config(args.config)
    pairs = config['exchange']['pair_whitelist']
    PROCESSED = optimize.preprocess(optimize.load_data(
        args.datadir, pairs=pairs, ticker_interval=args.ticker_interval))

    if args.mongodb:
        logger.info('Using mongodb ...')
        logger.info('Start scripts/start-mongodb.sh and start-hyperopt-worker.sh manually!')

        db_name = 'freqtrade_hyperopt'
        TRIALS = MongoTrials('mongo://127.0.0.1:1234/{}/jobs'.format(db_name), exp_key='exp1')
    else:
        logger.info('Preparing Trials..')
        signal.signal(signal.SIGINT, signal_handler)
        # read trials file if we have one
        if os.path.exists(TRIALS_FILE):
            TRIALS = read_trials()

            _CURRENT_TRIES = len(TRIALS.results)
            TOTAL_TRIES = TOTAL_TRIES + _CURRENT_TRIES
            logger.info(
                'Continuing with trials. Current: {}, Total: {}'
                .format(_CURRENT_TRIES, TOTAL_TRIES))

    try:
        best_parameters = fmin(
            fn=optimizer,
            space=SPACE,
            algo=tpe.suggest,
            max_evals=TOTAL_TRIES,
            trials=TRIALS
        )

        results = sorted(TRIALS.results, key=itemgetter('loss'))
        best_result = results[0]['result']

    except ValueError:
        best_parameters = {}
        best_result = 'Sorry, Hyperopt was not able to find good parameters. Please ' \
                      'try with more epochs (param: -e).'

    # Improve best parameter logging display
    if best_parameters:
        best_parameters = space_eval(SPACE, best_parameters)

    logger.info('Best parameters:\n%s', json.dumps(best_parameters, indent=4))
    logger.info('Best Result:\n%s', best_result)

    # Store trials result to file to resume next time
    save_trials(TRIALS)

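# Illustrative sketch (not from the source): the namespace the hyperopt start()
# above expects. The attribute names come from the function body; the values are
# hypothetical examples.
import argparse
import logging

example_hyperopt_args = argparse.Namespace(
    epochs=100,                # evaluation budget handed to hyperopt's fmin
    loglevel=logging.INFO,     # level handed to logging.basicConfig
    config='config.json',      # path passed to load_config
    datadir=None,              # directory searched for local backtesting data
    ticker_interval=5,         # candle size in minutes
    mongodb=False,             # True distributes trials via MongoTrials
)
# start(example_hyperopt_args)
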
def test_get_timeframe():
    data = preprocess(optimize.load_data(
        None, ticker_interval=1, pairs=['BTC_UNITEST']))
    min_date, max_date = get_timeframe(data)
    assert min_date.isoformat() == '2017-11-04T23:02:00+00:00'
    assert max_date.isoformat() == '2017-11-14T22:59:00+00:00'