def convert_trades_to_ohlcv(pairs: List[str], timeframes: List[str], datadir: Path,
                            timerange: TimeRange, erase: bool = False,
                            data_format_ohlcv: str = 'json',
                            data_format_trades: str = 'jsongz') -> None:
    """
    Convert stored trades data to ohlcv data
    :param pairs: Pairs to convert
    :param timeframes: Timeframes to build from the trades
    :param datadir: Path to the data storage location.
    :param timerange: Unused here; kept for interface compatibility with callers.
    :param erase: Delete existing ohlcv data before storing the converted result
    :param data_format_ohlcv: Storage format for the resulting ohlcv data
    :param data_format_trades: Storage format of the source trades data
    """
    data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
    data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)

    for pair in pairs:
        # Trades are loaded once per pair and reused for every timeframe.
        trades = data_handler_trades.trades_load(pair)
        for timeframe in timeframes:
            if erase:
                if data_handler_ohlcv.ohlcv_purge(pair, timeframe):
                    logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
            try:
                # trades_to_ohlcv raises ValueError (e.g. for an empty trade list);
                # without this guard one bad pair aborts the entire conversion run.
                ohlcv = trades_to_ohlcv(trades, timeframe)
                # Store ohlcv
                data_handler_ohlcv.ohlcv_store(pair, timeframe, data=ohlcv)
            except ValueError:
                logger.exception(f'Could not convert {pair} to OHLCV.')
def convert_trades_to_ohlcv(
    pairs: List[str],
    timeframes: List[str],
    datadir: Path,
    timerange: TimeRange,
    erase: bool = False,
    data_format_ohlcv: str = 'json',
    data_format_trades: str = 'jsongz',
    candle_type: CandleType = CandleType.SPOT
) -> None:
    """
    Convert stored trades data to ohlcv data
    """
    data_handler_trades = get_datahandler(datadir, data_format=data_format_trades)
    data_handler_ohlcv = get_datahandler(datadir, data_format=data_format_ohlcv)

    for pair in pairs:
        # One trades-load per pair feeds all requested timeframes.
        trades = data_handler_trades.trades_load(pair)
        for timeframe in timeframes:
            if erase and data_handler_ohlcv.ohlcv_purge(pair, timeframe,
                                                        candle_type=candle_type):
                logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
            try:
                candles = trades_to_ohlcv(trades, timeframe)
                # Store ohlcv
                data_handler_ohlcv.ohlcv_store(pair, timeframe, data=candles,
                                               candle_type=candle_type)
            except ValueError:
                logger.exception(f'Could not convert {pair} to OHLCV.')
def convert_trades_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool):
    """
    Convert trades from one format to another format.
    :param config: Config dictionary
    :param convert_from: Source format
    :param convert_to: Target format
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    from freqtrade.data.history.idatahandler import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)

    if 'pairs' not in config:
        # No explicit pair list given - convert every pair present in the source data.
        config['pairs'] = src.trades_get_pairs(config['datadir'])
    logger.info(f"Converting trades for {config['pairs']}")

    for pair in config['pairs']:
        data = src.trades_load(pair=pair)
        logger.info(f"Converting {len(data)} trades for {pair}")
        trg.trades_store(pair, data)
        # Only purge when the formats differ - otherwise we would delete the
        # file we just wrote.
        if erase and convert_from != convert_to:
            logger.info(f"Deleting source Trade data for {pair}.")
            src.trades_purge(pair=pair)
def convert_ohlcv_format(config: Dict[str, Any], convert_from: str, convert_to: str, erase: bool):
    """
    Convert OHLCV from one format to another
    :param config: Config dictionary
    :param convert_from: Source format
    :param convert_to: Target format
    :param erase: Erase source data (does not apply if source and target format are identical)
    """
    from freqtrade.data.history.idatahandler import get_datahandler
    src = get_datahandler(config['datadir'], convert_from)
    trg = get_datahandler(config['datadir'], convert_to)
    timeframes = config.get('timeframes', [config.get('timeframe')])
    logger.info(f"Converting candle (OHLCV) for timeframe {timeframes}")

    if 'pairs' not in config:
        config['pairs'] = []
        # Check timeframes or fall back to timeframe.
        for timeframe in timeframes:
            config['pairs'].extend(src.ohlcv_get_pairs(config['datadir'], timeframe))
        # Deduplicate: a pair available in several timeframes would otherwise be
        # listed - and converted - multiple times per timeframe.
        config['pairs'] = sorted(set(config['pairs']))
    logger.info(f"Converting candle (OHLCV) data for {config['pairs']}")

    for timeframe in timeframes:
        for pair in config['pairs']:
            data = src.ohlcv_load(pair=pair, timeframe=timeframe,
                                  timerange=None,
                                  fill_missing=False,
                                  drop_incomplete=False,
                                  startup_candles=0)
            logger.info(f"Converting {len(data)} candles for {pair}")
            if len(data) > 0:
                trg.ohlcv_store(pair=pair, timeframe=timeframe, data=data)
                # Only purge when the formats differ - otherwise we would delete
                # the data we just wrote.
                if erase and convert_from != convert_to:
                    logger.info(f"Deleting source data for {pair} / {timeframe}")
                    src.ohlcv_purge(pair=pair, timeframe=timeframe)
def test_get_datahandler(testdatadir):
    # 'json' resolves to the plain JSON handler.
    dh = get_datahandler(testdatadir, 'json')
    assert type(dh) == JsonDataHandler
    # 'jsongz' resolves to the gzipped JSON handler.
    dh = get_datahandler(testdatadir, 'jsongz')
    assert type(dh) == JsonGzDataHandler
    # Passing an already-initialized handler returns that same instance.
    dh1 = get_datahandler(testdatadir, 'jsongz', dh)
    assert id(dh1) == id(dh)
def load_pair_history(pair: str,
                      timeframe: str,
                      datadir: Path, *,
                      timerange: Optional[TimeRange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True,
                      startup_candles: int = 0,
                      data_format: Optional[str] = None,
                      data_handler: Optional[IDataHandler] = None,
                      ) -> DataFrame:
    """
    Load cached ohlcv history for the given pair.

    :param pair: Pair to load data for
    :param timeframe: Timeframe (e.g. "5m")
    :param datadir: Path to the data storage location.
    :param data_format: Format of the data. Ignored if data_handler is set.
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
    :param data_handler: Initialized data-handler to use.
                         Will be initialized from data_format if not set
    :return: DataFrame with ohlcv data, or empty DataFrame
    """
    # Resolve the handler once; an explicit data_handler wins over data_format.
    data_handler = get_datahandler(datadir, data_format, data_handler)

    return data_handler.ohlcv_load(pair=pair,
                                   timeframe=timeframe,
                                   timerange=timerange,
                                   fill_missing=fill_up_missing,
                                   drop_incomplete=drop_incomplete,
                                   startup_candles=startup_candles,
                                   )
def refresh_data(*, datadir: Path,
                 timeframe: str,
                 pairs: List[str],
                 exchange: Exchange,
                 data_format: Optional[str] = None,
                 timerange: Optional[TimeRange] = None,
                 candle_type: CandleType,
                 ) -> None:
    """
    Refresh ohlcv history data for a list of pairs.

    :param datadir: Path to the data storage location.
    :param timeframe: Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param exchange: Exchange object
    :param data_format: dataformat to use
    :param timerange: Limit data to be loaded to this timerange
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    """
    data_handler = get_datahandler(datadir, data_format)
    # start=1 so the logged progress reads "1/N" .. "N/N" instead of "0/N" -
    # matches the numbering used by refresh_backtest_ohlcv_data.
    for idx, pair in enumerate(pairs, start=1):
        process = f'{idx}/{len(pairs)}'
        _download_pair_history(pair=pair, process=process,
                               timeframe=timeframe, datadir=datadir,
                               timerange=timerange, exchange=exchange,
                               data_handler=data_handler,
                               candle_type=candle_type)
def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog) -> None:
    # Fake exchange: returns the canned trades_history fixture for any pair.
    ght_mock = MagicMock(
        side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 ght_mock)
    exchange = get_patched_exchange(mocker, default_conf)
    file1 = testdatadir / 'ETH_BTC-trades.json.gz'
    data_handler = get_datahandler(testdatadir, data_format='jsongz')
    _backup_file(file1)

    assert not file1.is_file()

    # A successful download stores the trades and reports success.
    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
                                    pair='ETH/BTC')
    assert log_has("New Amount of trades: 5", caplog)
    assert file1.is_file()

    # clean files freshly downloaded
    _clean_test_file(file1)

    # An exchange error must be swallowed and reported as an unsuccessful download.
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 MagicMock(side_effect=ValueError))

    assert not _download_trades_history(
        data_handler=data_handler, exchange=exchange,
        pair='ETH/BTC')
    assert log_has_re(
        'Failed to download historic trades for pair: "ETH/BTC".*',
        caplog)
def refresh_data(
    datadir: Path,
    timeframe: str,
    pairs: List[str],
    exchange: Exchange,
    data_format: Optional[str] = None,
    timerange: Optional[TimeRange] = None,
) -> None:
    """
    Refresh ohlcv history data for a list of pairs.

    :param datadir: Path to the data storage location.
    :param timeframe: Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param exchange: Exchange object
    :param data_format: dataformat to use
    :param timerange: Limit data to be loaded to this timerange
    """
    data_handler = get_datahandler(datadir, data_format)
    for pair in pairs:
        _download_pair_history(pair=pair, timeframe=timeframe,
                               datadir=datadir, timerange=timerange,
                               exchange=exchange, data_handler=data_handler)
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, timerange: Optional[TimeRange] = None,
                                erase: bool = False, data_format: str = None) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    unavailable_pairs = []
    ohlcv_handler = get_datahandler(datadir, data_format)
    for pair in pairs:
        # Skip anything the exchange does not trade; report it back to the caller.
        if pair not in exchange.markets:
            unavailable_pairs.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for timeframe in timeframes:
            if erase and ohlcv_handler.ohlcv_purge(pair, timeframe):
                logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
            _download_pair_history(datadir=datadir, exchange=exchange,
                                   pair=pair, timeframe=str(timeframe),
                                   timerange=timerange, data_handler=ohlcv_handler)
    return unavailable_pairs
def test_datahandler_ohlcv_append(
    datahandler,
    testdatadir,
):
    dh = get_datahandler(testdatadir, datahandler)
    # Appending ohlcv data is not supported by these handlers -
    # the call must raise NotImplementedError.
    with pytest.raises(NotImplementedError):
        dh.ohlcv_append('UNITTEST/ETH', '5m', DataFrame())
def refresh_backtest_trades_data(exchange: Exchange, pairs: List[str], datadir: Path,
                                 timerange: TimeRange, erase: bool = False,
                                 data_format: str = 'jsongz') -> List[str]:
    """
    Refresh stored trades data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    unavailable_pairs = []
    trades_handler = get_datahandler(datadir, data_format=data_format)
    for pair in pairs:
        # Pairs unknown to the exchange are skipped and reported to the caller.
        if pair not in exchange.markets:
            unavailable_pairs.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        if erase and trades_handler.trades_purge(pair):
            logger.info(f'Deleting existing data for pair {pair}.')
        logger.info(f'Downloading trades for pair {pair}.')
        _download_trades_history(exchange=exchange,
                                 pair=pair,
                                 timerange=timerange,
                                 data_handler=trades_handler)
    return unavailable_pairs
def start_list_data(args: Dict[str, Any]) -> None:
    """
    List available backtest data
    """
    config = setup_utils_configuration(args, RunMode.UTIL_NO_EXCHANGE)

    from tabulate import tabulate

    from freqtrade.data.history.idatahandler import get_datahandler
    dhc = get_datahandler(config['datadir'], config['dataformat_ohlcv'])

    # List of (pair, timeframe) tuples available on disk.
    paircombs = dhc.ohlcv_get_available_data(config['datadir'])

    if args['pairs']:
        # Restrict the listing to the pairs requested on the command line.
        paircombs = [comb for comb in paircombs if comb[0] in args['pairs']]

    print(f"Found {len(paircombs)} pair / timeframe combinations.")
    groupedpair = defaultdict(list)
    # Sort by pair name, then by timeframe duration (so '5m' sorts before '1h').
    for pair, timeframe in sorted(paircombs, key=lambda x: (x[0], timeframe_to_minutes(x[1]))):
        groupedpair[pair].append(timeframe)

    if groupedpair:
        print(tabulate([
            (pair, ', '.join(timeframes))
            for pair, timeframes in groupedpair.items()
        ], headers=("Pair", "Timeframe"), tablefmt='psql', stralign='right'))
def test_download_trades_history(trades_history, mocker, default_conf, testdatadir, caplog) -> None:
    # Fake exchange: returns the canned trades_history fixture for any pair.
    ght_mock = MagicMock(side_effect=lambda pair, *args, **kwargs: (pair, trades_history))
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 ght_mock)
    exchange = get_patched_exchange(mocker, default_conf)
    file1 = testdatadir / 'ETH_BTC-trades.json.gz'
    data_handler = get_datahandler(testdatadir, data_format='jsongz')
    _backup_file(file1)

    assert not file1.is_file()

    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
                                    pair='ETH/BTC')
    assert log_has("New Amount of trades: 5", caplog)
    assert file1.is_file()
    ght_mock.reset_mock()

    # Re-download with a timerange that starts inside the cached data:
    # the download should resume from the cache, not restart from scratch.
    since_time = int(trades_history[-3][0] // 1000)
    since_time2 = int(trades_history[-1][0] // 1000)
    timerange = TimeRange('date', None, since_time, 0)
    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
                                    pair='ETH/BTC', timerange=timerange)

    assert ght_mock.call_count == 1
    # Check this in seconds - since we had to convert to seconds above too.
    assert int(ght_mock.call_args_list[0][1]['since'] // 1000) == since_time2 - 5
    assert ght_mock.call_args_list[0][1]['from_id'] is not None

    # clean files freshly downloaded
    _clean_test_file(file1)

    # Exchange errors are swallowed and reported as an unsuccessful download.
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 MagicMock(side_effect=ValueError))

    assert not _download_trades_history(data_handler=data_handler, exchange=exchange,
                                        pair='ETH/BTC')
    assert log_has_re('Failed to download historic trades for pair: "ETH/BTC".*', caplog)

    file2 = testdatadir / 'XRP_ETH-trades.json.gz'

    _backup_file(file2, True)

    ght_mock.reset_mock()
    mocker.patch('freqtrade.exchange.Exchange.get_historic_trades',
                 ght_mock)
    # Since before first start date
    since_time = int(trades_history[0][0] // 1000) - 500
    timerange = TimeRange('date', None, since_time, 0)

    assert _download_trades_history(data_handler=data_handler, exchange=exchange,
                                    pair='XRP/ETH', timerange=timerange)

    assert ght_mock.call_count == 1

    assert int(ght_mock.call_args_list[0][1]['since'] // 1000) == since_time
    # No cached trade to resume from, so from_id must be unset.
    assert ght_mock.call_args_list[0][1]['from_id'] is None
    assert log_has_re(r'Start earlier than available data. Redownloading trades for.*', caplog)
    _clean_test_file(file2)
def test_load_cached_data_for_updating(mocker, testdatadir) -> None:
    data_handler = get_datahandler(testdatadir, 'json')

    test_data = None
    test_filename = testdatadir.joinpath('UNITTEST_BTC-1m.json')
    with open(test_filename, "rt") as file:
        test_data = json.load(file)

    test_data_df = ohlcv_to_dataframe(test_data, '1m', 'UNITTEST/BTC',
                                      fill_missing=False, drop_incomplete=False)
    # now = last cached item + 1 hour
    now_ts = test_data[-1][0] / 1000 + 60 * 60
    mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))

    # timeframe starts earlier than the cached data
    # should fully update data
    timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
    assert data.empty
    assert start_ts == test_data[0][0] - 1000

    # timeframe starts in the center of the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
    assert_frame_equal(data, test_data_df.iloc[:-1])
    assert test_data[-2][0] <= start_ts < test_data[-1][0]

    # timeframe starts after the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 100, 0)
    data, start_ts = _load_cached_data_for_updating('UNITTEST/BTC', '1m', timerange, data_handler)
    assert_frame_equal(data, test_data_df.iloc[:-1])
    assert test_data[-2][0] <= start_ts < test_data[-1][0]

    # no datafile exist
    # should return timestamp start time
    timerange = TimeRange('date', None, now_ts - 10000, 0)
    data, start_ts = _load_cached_data_for_updating('NONEXIST/BTC', '1m', timerange, data_handler)
    assert data.empty
    assert start_ts == (now_ts - 10000) * 1000

    # no datafile exist, no timeframe is set
    # should return an empty array and None
    data, start_ts = _load_cached_data_for_updating('NONEXIST/BTC', '1m', None, data_handler)
    assert data.empty
    assert start_ts is None
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, trading_mode: str,
                                timerange: Optional[TimeRange] = None,
                                new_pairs_days: int = 30, erase: bool = False,
                                data_format: Optional[str] = None,
                                ) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.

    :param exchange: Exchange object used for the downloads
    :param pairs: Pairs to download
    :param timeframes: Timeframes to download for each pair
    :param datadir: Path to the data storage location.
    :param trading_mode: Trading mode - determines the default candle type.
    :param timerange: Limit data to be loaded to this timerange
    :param new_pairs_days: Days to download when no cached data is available
    :param erase: Erase existing data before downloading
    :param data_format: Storage format to use
    :return: List of pairs that are not available.
    """
    pairs_not_available = []
    data_handler = get_datahandler(datadir, data_format)
    candle_type = CandleType.get_default(trading_mode)
    for idx, pair in enumerate(pairs, start=1):
        if pair not in exchange.markets:
            pairs_not_available.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for timeframe in timeframes:
            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
            process = f'{idx}/{len(pairs)}'
            _download_pair_history(pair=pair, process=process,
                                   datadir=datadir, exchange=exchange,
                                   timerange=timerange, data_handler=data_handler,
                                   timeframe=str(timeframe), new_pairs_days=new_pairs_days,
                                   candle_type=candle_type,
                                   erase=erase)
        if trading_mode == 'futures':
            # Predefined candletype (and timeframe) depending on exchange
            # Downloads what is necessary to backtest based on futures data.
            tf_mark = exchange._ft_has['mark_ohlcv_timeframe']
            fr_candle_type = CandleType.from_string(exchange._ft_has['mark_ohlcv_price'])
            # All exchanges need FundingRate for futures trading.
            # The timeframe is aligned to the mark-price timeframe.
            for funding_candle_type in (CandleType.FUNDING_RATE, fr_candle_type):
                _download_pair_history(pair=pair, process=process,
                                       datadir=datadir, exchange=exchange,
                                       timerange=timerange, data_handler=data_handler,
                                       timeframe=str(tf_mark), new_pairs_days=new_pairs_days,
                                       candle_type=funding_candle_type,
                                       erase=erase)

    return pairs_not_available
def load_data(datadir: Path,
              timeframe: str,
              pairs: List[str], *,
              timerange: Optional[TimeRange] = None,
              fill_up_missing: bool = True,
              startup_candles: int = 0,
              fail_without_data: bool = False,
              data_format: str = 'json',
              candle_type: CandleType = CandleType.SPOT
              ) -> Dict[str, DataFrame]:
    """
    Load ohlcv history data for a list of pairs.

    :param datadir: Path to the data storage location.
    :param timeframe: Timeframe (e.g. "5m")
    :param pairs: List of pairs to load
    :param timerange: Limit data to be loaded to this timerange
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param startup_candles: Additional candles to load at the start of the period
    :param fail_without_data: Raise OperationalException if no data is found.
    :param data_format: Data format which should be used. Defaults to json
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :return: dict(<pair>:<Dataframe>)
    """
    if startup_candles > 0 and timerange:
        logger.info(f'Using indicator startup period: {startup_candles} ...')

    # One shared handler instance for all pairs.
    handler = get_datahandler(datadir, data_format)

    loaded: Dict[str, DataFrame] = {}
    for pair_name in pairs:
        candles = load_pair_history(pair=pair_name, timeframe=timeframe,
                                    datadir=datadir, timerange=timerange,
                                    fill_up_missing=fill_up_missing,
                                    startup_candles=startup_candles,
                                    data_handler=handler,
                                    candle_type=candle_type
                                    )
        # Pairs without any data are silently left out of the result.
        if not candles.empty:
            loaded[pair_name] = candles

    if fail_without_data and not loaded:
        raise OperationalException("No data found. Terminating.")
    return loaded
def refresh_backtest_ohlcv_data(exchange: Exchange, pairs: List[str], timeframes: List[str],
                                datadir: Path, timerange: Optional[TimeRange] = None,
                                new_pairs_days: int = 30, erase: bool = False,
                                data_format: str = None) -> List[str]:
    """
    Refresh stored ohlcv data for backtesting and hyperopt operations.
    Used by freqtrade download-data subcommand.
    :return: List of pairs that are not available.
    """
    unavailable_pairs = []
    ohlcv_handler = get_datahandler(datadir, data_format)
    total_downloads = len(pairs) * len(timeframes)
    downloads_done = 0
    logger.info(f'Total number of downloads: {total_downloads}')
    for pair in pairs:
        # Skip pairs the exchange does not trade and report them to the caller.
        if pair not in exchange.markets:
            unavailable_pairs.append(pair)
            logger.info(f"Skipping pair {pair}...")
            continue
        for timeframe in timeframes:
            downloads_done += 1
            progress_pct = round(downloads_done / total_downloads * 100, 1)
            logger.info(f'Download progress: {downloads_done}/{total_downloads} ({progress_pct} %)')
            if erase and ohlcv_handler.ohlcv_purge(pair, timeframe):
                logger.info(f'Deleting existing data for pair {pair}, interval {timeframe}.')
            logger.info(f'Downloading pair {pair}, interval {timeframe}.')
            _download_pair_history(datadir=datadir, exchange=exchange,
                                   pair=pair, timeframe=str(timeframe),
                                   new_pairs_days=new_pairs_days,
                                   timerange=timerange, data_handler=ohlcv_handler)
    return unavailable_pairs
def _download_pair_history(pair: str, *,
                           datadir: Path,
                           exchange: Exchange,
                           timeframe: str = '5m',
                           process: str = '',
                           new_pairs_days: int = 30,
                           data_handler: Optional[IDataHandler] = None,
                           timerange: Optional[TimeRange] = None,
                           candle_type: CandleType,
                           erase: bool = False,
                           ) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Timeframe (e.g "5m")
    :param timerange: range of time to download
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :param erase: Erase existing data
    :return: bool with success state
    """
    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        if erase:
            if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
                logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')

        logger.info(
            f'Download history data for pair: "{pair}" ({process}), timeframe: {timeframe}, '
            f'candle type: {candle_type} and store in {datadir}.'
        )

        # Resume from cached candles; since_ms is falsy when no cache exists.
        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
                                                        data_handler=data_handler,
                                                        candle_type=candle_type)

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               arrow.utcnow().shift(
                                                   days=-new_pairs_days).int_timestamp * 1000,
                                               is_new_pair=data.empty,
                                               candle_type=candle_type,
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
                                           fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            # Run cleaning again to ensure there were no duplicate candles
            # Especially between existing and new data.
            data = clean_ohlcv_dataframe(concat([data, new_dataframe], axis=0), timeframe, pair,
                                         fill_missing=False, drop_incomplete=False)

        logger.debug("New Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type)
        return True

    except Exception:
        # Best-effort download: log the full traceback and signal failure
        # so the caller can continue with the remaining pairs.
        logger.exception(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}.'
        )
        return False
def test_datahandler_trades_append(datahandler, testdatadir):
    dh = get_datahandler(testdatadir, datahandler)
    # Appending trades is not supported by these handlers -
    # the call must raise NotImplementedError.
    with pytest.raises(NotImplementedError):
        dh.trades_append('UNITTEST/ETH', [])
def _download_pair_history(datadir: Path,
                           exchange: Exchange,
                           pair: str, *,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None,
                           data_handler: IDataHandler = None) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Timeframe (e.g "5m")
    :param timerange: range of time to download
    :return: bool with success state
    """
    # Local import keeps this fix self-contained for files that do not
    # already import concat at module level.
    from pandas import concat

    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        # Resume from cached candles; since_ms is falsy when no cache exists.
        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
                                                        data_handler=data_handler)

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
                                           fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            # DataFrame.append was deprecated in pandas 1.4 and removed in 2.0;
            # concat is the supported equivalent of the old row-wise append.
            data = concat([data, new_dataframe], axis=0)

        logger.debug("New Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False