def test_expand_pairlist_keep_invalid(wildcardlist, pairs, expected):
    """
    Parametrized test for expand_pairlist with keep_invalid=True.
    ``expected is None`` marks the cases where the wildcard itself is malformed
    and a ValueError is the expected outcome.
    """
    if expected is not None:
        result = expand_pairlist(wildcardlist, pairs, keep_invalid=True)
        # Order of expansion is not part of the contract - compare sorted.
        assert sorted(result) == sorted(expected)
    else:
        with pytest.raises(ValueError, match=r'Wildcard error in \*UP/USDT,'):
            expand_pairlist(wildcardlist, pairs, keep_invalid=True)
def _rpc_blacklist(self, add: List[str] = None) -> Dict:
    """ Returns the currently active blacklist"""
    errors = {}
    for pair in (add or []):
        if pair in self._freqtrade.pairlists.blacklist:
            # Duplicate entries are reported, not appended twice.
            errors[pair] = {
                'error_msg': f'Pair {pair} already in pairlist.'
            }
            continue
        try:
            # Validate the (possibly wildcard) pair against known markets
            # before accepting it into the blacklist.
            expand_pairlist([pair], self._freqtrade.exchange.get_markets().keys())
        except ValueError:
            errors[pair] = {
                'error_msg': f'Pair {pair} is not a valid wildcard.'
            }
        else:
            self._freqtrade.pairlists.blacklist.append(pair)

    return {
        'method': self._freqtrade.pairlists.name_list,
        'length': len(self._freqtrade.pairlists.blacklist),
        'blacklist': self._freqtrade.pairlists.blacklist,
        'blacklist_expanded': self._freqtrade.pairlists.expanded_blacklist,
        'errors': errors,
    }
def init_plotscript(config, markets: List, startup_candles: int = 0):
    """
    Initialize objects needed for plotting
    :return: Dict with candle (OHLCV) data, trades and pairs
    """
    # An explicit "pairs" entry takes precedence over the exchange whitelist.
    if "pairs" in config:
        raw_pairs = config['pairs']
    else:
        raw_pairs = config['exchange']['pair_whitelist']
    pairs = expand_pairlist(raw_pairs, markets)

    # Set timerange to use
    timerange = TimeRange.parse_timerange(config.get('timerange'))

    data = load_data(
        datadir=config.get('datadir'),
        pairs=pairs,
        timeframe=config['timeframe'],
        timerange=timerange,
        startup_candles=startup_candles,
        data_format=config.get('dataformat_ohlcv', 'json'),
    )

    if startup_candles and data:
        min_date, max_date = get_timerange(data)
        logger.info(f"Loading data from {min_date} to {max_date}")
        # Shift the start forward so indicator warmup candles are available.
        timerange.adjust_start_if_necessary(timeframe_to_seconds(config['timeframe']),
                                            startup_candles, min_date)

    filename = config.get('exportfilename')
    no_trades = False
    if config.get('no_trades', False):
        no_trades = True
    elif config['trade_source'] == 'file':
        # Without an export file there is nothing to load - skip trades.
        if not (filename.is_dir() or filename.is_file()):
            logger.warning("Backtest file is missing skipping trades.")
            no_trades = True
    try:
        trades = load_trades(
            config['trade_source'],
            db_url=config.get('db_url'),
            exportfilename=filename,
            no_trades=no_trades,
            strategy=config.get('strategy'),
        )
    except ValueError as e:
        raise OperationalException(e) from e
    if not trades.empty:
        trades = trim_dataframe(trades, timerange, 'open_date')

    return {
        "ohlcv": data,
        "trades": trades,
        "pairs": pairs,
        "timerange": timerange,
    }
def __init__(self, config: Dict[str, Any], exchange, strategy) -> None:
    self.config = config
    self.exchange = exchange
    self.strategy: IStrategy = strategy

    self.edge_config = self.config.get('edge', {})
    self._cached_pairs: Dict[str, Any] = {}  # Keeps a list of pairs
    self._final_pairs: list = []

    # checking max_open_trades. it should be -1 as with Edge
    # the number of trades is determined by position size
    if self.config['max_open_trades'] != float('inf'):
        logger.critical('max_open_trades should be -1 in config !')

    if self.config['stake_amount'] != UNLIMITED_STAKE_AMOUNT:
        raise OperationalException('Edge works only with unlimited stake amount')

    self._capital_ratio: float = self.config['tradable_balance_ratio']
    self._allowed_risk: float = self.edge_config.get('allowed_risk')
    self._since_number_of_days: int = self.edge_config.get('calculate_since_number_of_days', 14)
    self._last_updated: int = 0  # Timestamp of pairs last updated time
    self._refresh_pairs = True

    self._stoploss_range_min = float(self.edge_config.get('stoploss_range_min', -0.01))
    self._stoploss_range_max = float(self.edge_config.get('stoploss_range_max', -0.05))
    self._stoploss_range_step = float(self.edge_config.get('stoploss_range_step', -0.001))

    # calculating stoploss range
    self._stoploss_range = np.arange(self._stoploss_range_min,
                                     self._stoploss_range_max,
                                     self._stoploss_range_step)

    # Backtest window: the last N days up to "now".
    since = arrow.now().shift(days=-1 * self._since_number_of_days).format('YYYYMMDD')
    self._timerange: TimeRange = TimeRange.parse_timerange(f"{since}-")

    if config.get('fee'):
        self.fee = config['fee']
    else:
        try:
            # Probe the fee using the first whitelisted pair available on
            # the exchange; an empty expansion means no fee can be derived.
            first_pair = expand_pairlist(self.config['exchange']['pair_whitelist'],
                                         list(self.exchange.markets))[0]
            self.fee = self.exchange.get_fee(symbol=first_pair)
        except IndexError:
            self.fee = None
def verify_whitelist(self, pairlist: List[str], logmethod,
                     keep_invalid: bool = False) -> List[str]:
    """
    Verify and remove items from pairlist - returning a filtered pairlist.
    Logs a warning or info depending on `aswarning`.
    Pairlist Handlers explicitly using this method shall use
    `logmethod=logger.info` to avoid spamming with warning messages
    :param pairlist: Pairlist to validate
    :param logmethod: Function that'll be called, `logger.info` or `logger.warning`
    :param keep_invalid: If sets to True, drops invalid pairs silently while expanding regexes.
    :return: pairlist - whitelisted pairs
    """
    try:
        # Expansion validates wildcards against the exchange's markets.
        return expand_pairlist(pairlist, self._exchange.get_markets().keys(), keep_invalid)
    except ValueError as err:
        # A malformed wildcard invalidates the whole list.
        logger.error(f"Pair whitelist contains an invalid Wildcard: {err}")
        return []
def start_convert_trades(args: Dict[str, Any]) -> None:
    """Convert previously downloaded trade data into OHLCV candles."""
    config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

    timerange = TimeRange()

    # Remove stake-currency to skip checks which are not relevant for datadownload
    config['stake_currency'] = ''

    if 'pairs' not in config:
        raise OperationalException(
            "Downloading data requires a list of pairs. "
            "Please check the documentation on how to configure this.")

    # Init exchange
    exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config, validate=False)
    # Manual validations of relevant settings
    if not config['exchange'].get('skip_pair_validation', False):
        exchange.validate_pairs(config['pairs'])
    expanded_pairs = expand_pairlist(config['pairs'], list(exchange.markets))

    logger.info(f"About to Convert pairs: {expanded_pairs}, "
                f"intervals: {config['timeframes']} to {config['datadir']}")

    for timeframe in config['timeframes']:
        exchange.validate_timeframes(timeframe)

    # Convert downloaded trade data to different timeframes
    convert_trades_to_ohlcv(
        pairs=expanded_pairs,
        timeframes=config['timeframes'],
        datadir=config['datadir'],
        timerange=timerange,
        erase=bool(config.get('erase')),
        data_format_ohlcv=config['dataformat_ohlcv'],
        data_format_trades=config['dataformat_trades'],
    )
def start_download_data(args: Dict[str, Any]) -> None:
    """
    Download data (former download_backtest_data.py script)
    """
    config = setup_utils_configuration(args, RunMode.UTIL_EXCHANGE)

    if 'days' in config and 'timerange' in config:
        raise OperationalException("--days and --timerange are mutually exclusive. "
                                   "You can only specify one or the other.")
    timerange = TimeRange()
    if 'days' in config:
        # Translate --days into a "<start>-" open-ended timerange.
        start_str = (datetime.now() - timedelta(days=config['days'])).strftime("%Y%m%d")
        timerange = TimeRange.parse_timerange(f'{start_str}-')

    if 'timerange' in config:
        timerange = timerange.parse_timerange(config['timerange'])

    # Remove stake-currency to skip checks which are not relevant for datadownload
    config['stake_currency'] = ''

    if 'pairs' not in config:
        raise OperationalException(
            "Downloading data requires a list of pairs. "
            "Please check the documentation on how to configure this.")

    pairs_not_available: List[str] = []

    # Init exchange
    exchange = ExchangeResolver.load_exchange(config['exchange']['name'], config, validate=False)
    # Inactive markets are skipped unless explicitly requested.
    markets = [p for p, m in exchange.markets.items()
               if market_is_active(m) or config.get('include_inactive')]
    expanded_pairs = expand_pairlist(config['pairs'], markets)

    # Manual validations of relevant settings
    if not config['exchange'].get('skip_pair_validation', False):
        exchange.validate_pairs(expanded_pairs)
    logger.info(f"About to download pairs: {expanded_pairs}, "
                f"intervals: {config['timeframes']} to {config['datadir']}")

    for timeframe in config['timeframes']:
        exchange.validate_timeframes(timeframe)

    try:
        if config.get('download_trades'):
            pairs_not_available = refresh_backtest_trades_data(
                exchange, pairs=expanded_pairs, datadir=config['datadir'],
                timerange=timerange, new_pairs_days=config['new_pairs_days'],
                erase=bool(config.get('erase')),
                data_format=config['dataformat_trades'])

            # Convert downloaded trade data to different timeframes
            convert_trades_to_ohlcv(
                pairs=expanded_pairs,
                timeframes=config['timeframes'],
                datadir=config['datadir'],
                timerange=timerange,
                erase=bool(config.get('erase')),
                data_format_ohlcv=config['dataformat_ohlcv'],
                data_format_trades=config['dataformat_trades'],
            )
        else:
            pairs_not_available = refresh_backtest_ohlcv_data(
                exchange, pairs=expanded_pairs, timeframes=config['timeframes'],
                datadir=config['datadir'], timerange=timerange,
                new_pairs_days=config['new_pairs_days'],
                erase=bool(config.get('erase')),
                data_format=config['dataformat_ohlcv'])
    except KeyboardInterrupt:
        sys.exit("SIGINT received, aborting ...")
    finally:
        # Report pairs that could not be downloaded, even on abort.
        if pairs_not_available:
            logger.info(f"Pairs [{','.join(pairs_not_available)}] not available "
                        f"on exchange {exchange.name}.")
def expanded_blacklist(self) -> List[str]:
    """The expanded blacklist (including wildcard expansion)"""
    available_markets = self._exchange.get_markets().keys()
    return expand_pairlist(self._blacklist, available_markets)
def expanded_whitelist(self) -> List[str]:
    """The expanded whitelist (including wildcard expansion), filtering invalid pairs"""
    available_markets = self._exchange.get_markets().keys()
    return expand_pairlist(self._whitelist, available_markets)
def expanded_whitelist_keep_invalid(self) -> List[str]:
    """The expanded whitelist (including wildcard expansion), maintaining invalid pairs"""
    available_markets = self._exchange.get_markets().keys()
    return expand_pairlist(self._whitelist, available_markets, keep_invalid=True)
def calculate(self) -> bool:
    """
    Run the Edge backtest over the whitelisted pairs and cache per-pair
    expectancy results. Returns False when throttled, when no data is
    available or when no trades were found; True on success.
    """
    pairs = expand_pairlist(self.config['exchange']['pair_whitelist'],
                            list(self.exchange.markets))
    heartbeat = self.edge_config.get('process_throttle_secs')
    # Throttle: skip recalculation while the previous result is still fresh.
    if (self._last_updated > 0) and (
            self._last_updated + heartbeat > arrow.utcnow().int_timestamp):
        return False

    data: Dict[str, Any] = {}
    logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
    logger.info('Using local backtesting data (using whitelist in given config) ...')

    if self._refresh_pairs:
        refresh_data(
            datadir=self.config['datadir'],
            pairs=pairs,
            exchange=self.exchange,
            timeframe=self.strategy.timeframe,
            timerange=self._timerange,
            data_format=self.config.get('dataformat_ohlcv', 'json'),
        )

    data = load_data(
        datadir=self.config['datadir'],
        pairs=pairs,
        timeframe=self.strategy.timeframe,
        timerange=self._timerange,
        startup_candles=self.strategy.startup_candle_count,
        data_format=self.config.get('dataformat_ohlcv', 'json'),
    )

    if not data:
        # Reinitializing cached pairs
        self._cached_pairs = {}
        logger.critical("No data found. Edge is stopped ...")
        return False

    preprocessed = self.strategy.ohlcvdata_to_dataframe(data)

    # Print timeframe
    min_date, max_date = get_timerange(preprocessed)
    logger.info(f'Measuring data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
                f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
                f'({(max_date - min_date).days} days)..')

    headers = ['date', 'buy', 'open', 'close', 'sell', 'high', 'low']
    trades: list = []
    for pair, pair_data in preprocessed.items():
        # Sorting dataframe by date and reset index
        ordered = pair_data.sort_values(by=['date']).reset_index(drop=True)
        df_analyzed = self.strategy.advise_sell(
            self.strategy.advise_buy(ordered, {'pair': pair}),
            {'pair': pair})[headers].copy()
        trades.extend(self._find_trades_for_stoploss_range(df_analyzed, pair,
                                                           self._stoploss_range))

    # If no trade found then exit
    if not trades:
        logger.info("No trades found.")
        return False

    # Fill missing, calculable columns, profit, duration , abs etc.
    trades_df = self._fill_calculable_fields(DataFrame(trades))
    self._cached_pairs = self._process_expectancy(trades_df)
    self._last_updated = arrow.utcnow().int_timestamp
    return True