Example #1
def _download_pair_history(datadir: Path,
                           exchange: Exchange,
                           pair: str,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
    """
    Download the latest candles from the exchange for the given pair and timeframe.
    The data is downloaded starting from the last correct data that
    exists in the cache. If the timerange starts earlier than the cached data,
    the full data will be redownloaded.

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Ticker timeframe (e.g. 5m)
    :param timerange: range of time to download
    :return: bool with success state
    """
    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)

        logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
        logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        store_tickerdata_file(datadir, pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
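
In this first version the cached candles are plain lists of [timestamp_ms, open, high, low, close, volume] rows, and the download window falls back to the last 30 days when nothing is cached. A minimal standalone sketch of that fallback, using the same arrow expression as above (the helper name _default_since_ms is illustrative and not part of freqtrade):

import arrow

def _default_since_ms(since_ms=None, fallback_days: int = 30) -> int:
    """Return the resume point in milliseconds, falling back to `fallback_days` ago."""
    if since_ms:
        return since_ms
    # Same expression as in the function above: whole seconds since epoch, then to ms.
    return int(arrow.utcnow().shift(days=-fallback_days).float_timestamp) * 1000

# With an empty cache, _default_since_ms(None) yields a timestamp roughly 30 days back.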
Example #2
def _download_pair_history(datadir: Path,
                           exchange: Exchange,
                           pair: str, *,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None,
                           data_handler: Optional[IDataHandler] = None) -> bool:
    """
    Download the latest candles from the exchange for the given pair and timeframe.
    The data is downloaded starting from the last correct data that
    exists in the cache. If the timerange starts earlier than the cached data,
    the full data will be redownloaded.

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Timeframe (e.g. "5m")
    :param timerange: range of time to download
    :return: bool with success state
    """
    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
                                                        data_handler=data_handler)

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
                                           fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            data = data.append(new_dataframe)

        logger.debug("New  Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
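
This second version switches from raw candle lists to pandas DataFrames: the freshly downloaded candles are parsed with ohlcv_to_dataframe and persisted through an IDataHandler instead of store_tickerdata_file. A rough, simplified stand-in for that parsing step, assuming ccxt-style [timestamp_ms, open, high, low, close, volume] rows (raw_ohlcv_to_df is a hypothetical helper, not freqtrade's implementation):

import pandas as pd

def raw_ohlcv_to_df(raw_candles) -> pd.DataFrame:
    # One row per candle: millisecond timestamp plus OHLCV values.
    df = pd.DataFrame(raw_candles, columns=['date', 'open', 'high', 'low', 'close', 'volume'])
    # Keep 'date' as timezone-aware datetimes, which is how the DataFrame is used above.
    df['date'] = pd.to_datetime(df['date'], unit='ms', utc=True)
    return df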
Example #3
def _download_pair_history(pair: str, *,
                           datadir: Path,
                           exchange: Exchange,
                           timeframe: str = '5m',
                           process: str = '',
                           new_pairs_days: int = 30,
                           data_handler: Optional[IDataHandler] = None,
                           timerange: Optional[TimeRange] = None,
                           candle_type: CandleType,
                           erase: bool = False,
                           ) -> bool:
    """
    Download the latest candles from the exchange for the given pair and timeframe.
    The data is downloaded starting from the last correct data that
    exists in the cache. If the timerange starts earlier than the cached data,
    the full data will be redownloaded.

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Timeframe (e.g. "5m")
    :param timerange: range of time to download
    :param candle_type: Any of the enum CandleType (must match trading mode!)
    :param erase: Erase existing data
    :return: bool with success state
    """
    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        if erase:
            if data_handler.ohlcv_purge(pair, timeframe, candle_type=candle_type):
                logger.info(f'Deleting existing data for pair {pair}, {timeframe}, {candle_type}.')

        logger.info(
            f'Download history data for pair: "{pair}" ({process}), timeframe: {timeframe}, '
            f'candle type: {candle_type} and store in {datadir}.'
        )

        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
                                                        data_handler=data_handler,
                                                        candle_type=candle_type)

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               arrow.utcnow().shift(
                                                   days=-new_pairs_days).int_timestamp * 1000,
                                               is_new_pair=data.empty,
                                               candle_type=candle_type,
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = ohlcv_to_dataframe(new_data, timeframe, pair,
                                           fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            # Run cleaning again to ensure there were no duplicate candles
            # Especially between existing and new data.
            data = clean_ohlcv_dataframe(concat([data, new_dataframe], axis=0), timeframe, pair,
                                         fill_missing=False, drop_incomplete=False)

        logger.debug("New  Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data, candle_type=candle_type)
        return True

    except Exception:
        logger.exception(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}.'
        )
        return False
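
This third version also replaces DataFrame.append with concat followed by a second cleaning pass. Because the download resumes at (or just before) the last cached candle, the cached and new frames usually overlap by at least one row, and that overlap has to be deduplicated before storing. A minimal sketch of the idea (merge_candles is illustrative; freqtrade's clean_ohlcv_dataframe does more than this):

import pandas as pd

def merge_candles(cached: pd.DataFrame, new: pd.DataFrame) -> pd.DataFrame:
    """Concatenate cached and freshly downloaded candles, keeping the newest duplicates."""
    merged = pd.concat([cached, new], axis=0)
    # The resumed download re-fetches the last cached candle, so drop the older copy.
    merged = merged.drop_duplicates(subset='date', keep='last')
    return merged.sort_values('date').reset_index(drop=True)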