def test_load_data_with_new_pair_1min(ticker_history_list, mocker, caplog,
                                      default_conf, testdatadir) -> None:
    """
    Test load_pair_history() with 1 min ticker
    """
    mocker.patch('freqtrade.exchange.Exchange.get_historic_ohlcv',
                 return_value=ticker_history_list)
    patched_exchange = get_patched_exchange(mocker, default_conf)
    new_pair_file = testdatadir / 'MEME_BTC-1m.json'
    _backup_file(new_pair_file)

    # Without refresh_pairs, an unknown pair must NOT be downloaded -
    # only a log message pointing at `freqtrade download-data` is emitted.
    load_pair_history(datadir=testdatadir, timeframe='1m', pair='MEME/BTC')
    assert not new_pair_file.is_file()
    assert log_has(
        'No history data for pair: "MEME/BTC", timeframe: 1m. '
        'Use `freqtrade download-data` to download the data', caplog)

    # With an explicit refresh, the new pair is fetched and persisted to disk.
    refresh_data(datadir=testdatadir, timeframe='1m', pairs=['MEME/BTC'],
                 exchange=patched_exchange)
    load_pair_history(datadir=testdatadir, timeframe='1m', pair='MEME/BTC')
    assert new_pair_file.is_file()
    assert log_has_re(
        'Download history data for pair: "MEME/BTC", timeframe: 1m '
        'and store in .*', caplog)

    _clean_test_file(new_pair_file)
def test_init_with_refresh(default_conf, mocker) -> None:
    """Refreshing with an empty pair list must leave load_data() empty too."""
    exchange_mock = get_patched_exchange(mocker, default_conf)
    refresh_data(
        datadir='',
        pairs=[],
        timeframe=default_conf['ticker_interval'],
        exchange=exchange_mock,
    )
    # Nothing was requested, so nothing should have been stored or loaded.
    assert load_data(
        datadir='',
        pairs=[],
        timeframe=default_conf['ticker_interval'],
    ) == {}
def calculate(self, pairs: List[str]) -> bool:
    """
    Run the full Edge expectancy calculation for the given pairs.

    Downloads/refreshes candle data if configured, runs the strategy's
    indicators and buy/sell advice over it, simulates trades across the
    stoploss range and caches the resulting expectancy per pair.

    :param pairs: Pairs to calculate expectancy for.
    :return: True if expectancy was (re)calculated and cached,
             False when throttled, when no data was found or no trades occurred.
    """
    # Fee is fetched lazily from the exchange, using the first pair as sample.
    if self.fee is None and pairs:
        self.fee = self.exchange.get_fee(pairs[0])

    heartbeat = self.edge_config.get('process_throttle_secs')

    # Throttle: skip recalculation while still inside the heartbeat window.
    if (self._last_updated > 0) and (
            self._last_updated + heartbeat > arrow.utcnow().int_timestamp):
        return False

    data: Dict[str, Any] = {}
    logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
    logger.info('Using local backtesting data (using whitelist in given config) ...')

    if self._refresh_pairs:
        # Extend the requested timerange backwards so startup candles
        # (warm-up period for indicators) are available as well.
        timerange_startup = deepcopy(self._timerange)
        timerange_startup.subtract_start(timeframe_to_seconds(
            self.strategy.timeframe) * self.strategy.startup_candle_count)
        refresh_data(
            datadir=self.config['datadir'],
            pairs=pairs,
            exchange=self.exchange,
            timeframe=self.strategy.timeframe,
            timerange=timerange_startup,
            data_format=self.config.get('dataformat_ohlcv', 'json'),
        )
        # Download informative pairs too, grouped by their timeframe.
        res = defaultdict(list)
        for p, t in self.strategy.gather_informative_pairs():
            res[t].append(p)
        for timeframe, inf_pairs in res.items():
            timerange_startup = deepcopy(self._timerange)
            timerange_startup.subtract_start(timeframe_to_seconds(
                timeframe) * self.strategy.startup_candle_count)
            refresh_data(
                datadir=self.config['datadir'],
                pairs=inf_pairs,
                exchange=self.exchange,
                timeframe=timeframe,
                timerange=timerange_startup,
                data_format=self.config.get('dataformat_ohlcv', 'json'),
            )

    data = load_data(
        datadir=self.config['datadir'],
        pairs=pairs,
        timeframe=self.strategy.timeframe,
        timerange=self._timerange,
        startup_candles=self.strategy.startup_candle_count,
        data_format=self.config.get('dataformat_ohlcv', 'json'),
    )

    if not data:
        # Reinitializing cached pairs
        self._cached_pairs = {}
        logger.critical("No data found. Edge is stopped ...")
        return False
    # Fake run-mode to Edge while computing indicators, then restore it.
    prior_rm = self.config['runmode']
    self.config['runmode'] = RunMode.EDGE
    preprocessed = self.strategy.advise_all_indicators(data)
    self.config['runmode'] = prior_rm

    # Print timeframe
    min_date, max_date = get_timerange(preprocessed)
    logger.info(f'Measuring data from {min_date.strftime(DATETIME_PRINT_FORMAT)} '
                f'up to {max_date.strftime(DATETIME_PRINT_FORMAT)} '
                f'({(max_date - min_date).days} days)..')

    headers = ['date', 'buy', 'open', 'close', 'sell', 'high', 'low']

    trades: list = []
    for pair, pair_data in preprocessed.items():
        # Sorting dataframe by date and reset index
        pair_data = pair_data.sort_values(by=['date'])
        pair_data = pair_data.reset_index(drop=True)

        # Apply buy/sell advice, then keep only the columns Edge needs.
        df_analyzed = self.strategy.advise_sell(
            self.strategy.advise_buy(pair_data, {'pair': pair}),
            {'pair': pair})[headers].copy()

        # Simulate trades for every stoploss value in the configured range.
        trades += self._find_trades_for_stoploss_range(df_analyzed, pair,
                                                       self._stoploss_range)

    # If no trade found then exit
    if len(trades) == 0:
        logger.info("No trades found.")
        return False

    # Fill missing, calculable columns, profit, duration , abs etc.
    trades_df = self._fill_calculable_fields(DataFrame(trades))
    self._cached_pairs = self._process_expectancy(trades_df)
    self._last_updated = arrow.utcnow().int_timestamp

    return True
def calculate(self) -> bool:
    """
    Run the Edge expectancy calculation over the configured whitelist.

    Optionally refreshes candle data, applies the strategy's indicators and
    buy/sell advice, simulates trades across the stoploss range and caches
    the resulting expectancy per pair.

    :return: True if expectancy was (re)calculated and cached,
             False when throttled, when no data was found or no trades occurred.
    """
    pairs = self.config['exchange']['pair_whitelist']
    heartbeat = self.edge_config.get('process_throttle_secs')

    # Throttle: skip recalculation while still inside the heartbeat window.
    # Use `int_timestamp`: in arrow >= 1.0 `.timestamp` is a method (not a
    # property), so the old attribute access compared against a bound method
    # and stored it into _last_updated.  `int_timestamp` is also what the
    # other calculate() implementation in this file uses.
    if (self._last_updated > 0) and (
            self._last_updated + heartbeat > arrow.utcnow().int_timestamp):
        return False

    data: Dict[str, Any] = {}
    logger.info('Using stake_currency: %s ...', self.config['stake_currency'])
    logger.info('Using local backtesting data (using whitelist in given config) ...')

    if self._refresh_pairs:
        refresh_data(
            datadir=self.config['datadir'],
            pairs=pairs,
            exchange=self.exchange,
            timeframe=self.strategy.ticker_interval,
            timerange=self._timerange,
        )

    data = load_data(
        datadir=self.config['datadir'],
        pairs=pairs,
        timeframe=self.strategy.ticker_interval,
        timerange=self._timerange,
        startup_candles=self.strategy.startup_candle_count,
        data_format=self.config.get('dataformat_ohlcv', 'json'),
    )

    if not data:
        # Reinitializing cached pairs
        self._cached_pairs = {}
        logger.critical("No data found. Edge is stopped ...")
        return False

    preprocessed = self.strategy.ohlcvdata_to_dataframe(data)

    # Print timeframe
    min_date, max_date = get_timerange(preprocessed)
    logger.info(
        'Measuring data from %s up to %s (%s days) ...',
        min_date.isoformat(),
        max_date.isoformat(),
        (max_date - min_date).days
    )

    headers = ['date', 'buy', 'open', 'close', 'sell', 'high', 'low']

    trades: list = []
    for pair, pair_data in preprocessed.items():
        # Sorting dataframe by date and reset index
        pair_data = pair_data.sort_values(by=['date'])
        pair_data = pair_data.reset_index(drop=True)

        # Apply buy/sell advice, then keep only the columns Edge needs.
        df_analyzed = self.strategy.advise_sell(
            self.strategy.advise_buy(pair_data, {'pair': pair}),
            {'pair': pair})[headers].copy()

        # Simulate trades for every stoploss value in the configured range.
        trades += self._find_trades_for_stoploss_range(df_analyzed, pair,
                                                       self._stoploss_range)

    # If no trade found then exit
    if len(trades) == 0:
        logger.info("No trades found.")
        return False

    # Fill missing, calculable columns, profit, duration , abs etc.
    trades_df = self._fill_calculable_fields(DataFrame(trades))
    self._cached_pairs = self._process_expectancy(trades_df)
    self._last_updated = arrow.utcnow().int_timestamp

    return True