Example no. 1
def download_pair_history(datadir: Optional[Path],
                          exchange: Optional[Exchange],
                          pair: str,
                          ticker_interval: str = '5m',
                          timerange: Optional[TimeRange] = None) -> bool:
    """
    Download the latest ticker intervals from the exchange for the pair passed in parameters
    The data is downloaded starting from the last correct ticker interval data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param ticker_interval: ticker interval
    :param timerange: range of time to download
    :return: bool with success state
    """
    if not exchange:
        raise OperationalException(
            "Exchange needs to be initialized when downloading pair history data"
        )

    try:
        filename = pair_data_filename(datadir, pair, ticker_interval)

        logger.info(
            f'Download history data for pair: "{pair}", interval: {ticker_interval} '
            f'and store in {datadir}.')

        data, since_ms = load_cached_data_for_updating(filename,
                                                       ticker_interval,
                                                       timerange)

        logger.debug("Current Start: %s",
                     misc.format_ms_time(data[1][0]) if data else 'None')
        logger.debug("Current End: %s",
                     misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 3 years if nothing is given
        new_data = exchange.get_history(
            pair=pair,
            ticker_interval=ticker_interval,
            since_ms=since_ms if since_ms else
            int(arrow.utcnow().shift(days=-3 * 365).float_timestamp) * 1000)
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        misc.file_dump_json(filename, data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", interval: {ticker_interval}. '
            f'Error: {e}')
        return False
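
A minimal usage sketch for this variant follows; my_exchange is a placeholder for an already-initialized freqtrade Exchange object, and the data-directory path is illustrative:

from pathlib import Path

# 'my_exchange' is assumed to be an already-initialized freqtrade Exchange instance.
ok = download_pair_history(datadir=Path('user_data/data'),
                           exchange=my_exchange,
                           pair='ETH/BTC',
                           ticker_interval='1h',
                           timerange=None)  # None: fall back to the default lookback
if not ok:
    print('Download of ETH/BTC 1h history failed; check the log for details.')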
Example no. 2
def test_format_ms_time() -> None:
    # Date 2018-04-10 18:02:01
    date_in_epoch_ms = 1523383321000
    date = format_ms_time(date_in_epoch_ms)
    assert type(date) is str
    res = datetime.datetime(2018, 4, 10, 18, 2, 1, tzinfo=datetime.timezone.utc)
    assert date == res.astimezone(None).strftime('%Y-%m-%dT%H:%M:%S')
    # Date 2017-12-13 08:02:01
    date_in_epoch_ms = 1513152121000
    res = datetime.datetime(2017, 12, 13, 8, 2, 1, tzinfo=datetime.timezone.utc)
    assert format_ms_time(date_in_epoch_ms) == res.astimezone(None).strftime('%Y-%m-%dT%H:%M:%S')
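
A standard-library implementation consistent with this test could look like the sketch below; it is illustrative only and not necessarily identical to freqtrade's actual format_ms_time, which the test imports:

import datetime

def format_ms_time_sketch(date_in_epoch_ms: int) -> str:
    """Render an epoch timestamp given in milliseconds as 'YYYY-MM-DDTHH:MM:SS'."""
    dt = datetime.datetime.fromtimestamp(date_in_epoch_ms / 1000,
                                         tz=datetime.timezone.utc)
    # .astimezone(None) converts to the local timezone, mirroring the expected
    # values built in the test above.
    return dt.astimezone(None).strftime('%Y-%m-%dT%H:%M:%S')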
Example no. 3
def download_pair_history(datadir: Optional[Path],
                          exchange: Exchange,
                          pair: str,
                          ticker_interval: str = '5m',
                          timerange: Optional[TimeRange] = None) -> bool:
    """
    Download the latest ticker intervals from the exchange for the pair passed in parameters
    The data is downloaded starting from the last correct ticker interval data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data
    :param pair: pair to download
    :param ticker_interval: ticker interval
    :param timerange: range of time to download
    :return: bool with success state

    """
    try:
        path = make_testdata_path(datadir)
        filepair = pair.replace("/", "_")
        filename = path.joinpath(f'{filepair}-{ticker_interval}.json')

        logger.info('Download the pair: "%s", Interval: %s', pair,
                    ticker_interval)

        data, since_ms = load_cached_data_for_updating(filename,
                                                       ticker_interval,
                                                       timerange)

        logger.debug("Current Start: %s",
                     misc.format_ms_time(data[1][0]) if data else 'None')
        logger.debug("Current End: %s",
                     misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_history(
            pair=pair,
            ticker_interval=ticker_interval,
            since_ms=since_ms if since_ms else
            int(arrow.utcnow().shift(days=-30).float_timestamp) * 1000)
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        misc.file_dump_json(filename, data)
        return True
    except Exception:
        logger.error('Failed to download the pair: "%s", Interval: %s', pair,
                     ticker_interval)
        return False
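
The cache-resume behaviour described in the docstring (continue from the newest cached candle, or redownload everything when the requested timerange starts before the cache) can be sketched roughly as follows; this is a simplified illustration, not freqtrade's actual load_cached_data_for_updating:

import json
from pathlib import Path
from typing import List, Optional, Tuple

def load_cached_data_sketch(filename: Path,
                            interval_minutes: int,
                            requested_start_ms: Optional[int]
                            ) -> Tuple[List[list], Optional[int]]:
    """Return (cached candles, epoch-ms timestamp to resume downloading from)."""
    if not filename.is_file():
        # No cache yet: download from the requested start (or the caller's default).
        return [], requested_start_ms

    data = json.loads(filename.read_text())
    if not data or (requested_start_ms is not None and requested_start_ms < data[0][0]):
        # Requested range starts before the cache: discard it and redownload in full.
        return [], requested_start_ms

    # Resume one candle after the newest cached entry.
    return data, data[-1][0] + interval_minutes * 60 * 1000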
Example no. 4
def _download_pair_history(datadir: Path,
                           exchange: Exchange,
                           pair: str,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Ticker Timeframe (e.g 5m)
    :param timerange: range of time to download
    :return: bool with success state
    """
    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        data, since_ms = _load_cached_data_for_updating(datadir, pair, timeframe, timerange)

        logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
        logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        data.extend(new_data)

        logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
        logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

        store_tickerdata_file(datadir, pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
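
Callers typically drive this helper in a loop over pairs and timeframes, roughly as sketched below; the pair list, the data directory and my_exchange are placeholders rather than values from this example:

from pathlib import Path

pairs = ['BTC/USDT', 'ETH/USDT']   # illustrative pair list
timeframes = ['5m', '1h']          # illustrative timeframes
datadir = Path('user_data/data/binance')

failed = []
for pair in pairs:
    for timeframe in timeframes:
        # 'my_exchange' stands in for an initialized freqtrade Exchange instance.
        if not _download_pair_history(datadir, my_exchange, pair, timeframe):
            failed.append((pair, timeframe))

if failed:
    print(f'Could not download: {failed}')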
Example no. 5
def download_backtesting_testdata(datadir: str,
                                  exchange: Exchange,
                                  pair: str,
                                  tick_interval: str = '5m',
                                  timerange: Optional[TimeRange] = None) -> None:
    """
    Download the latest ticker intervals from the exchange for the pair passed in parameters
    The data is downloaded starting from the last correct ticker interval data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data
    :param pair: pair to download
    :param tick_interval: ticker interval
    :param timerange: range of time to download
    :return: None

    """

    path = make_testdata_path(datadir)
    filepair = pair.replace("/", "_")
    filename = os.path.join(path, f'{filepair}-{tick_interval}.json')

    logger.info(
        'Download the pair: "%s", Interval: %s',
        pair,
        tick_interval
    )

    data, since_ms = load_cached_data_for_updating(filename, tick_interval, timerange)

    logger.debug("Current Start: %s", misc.format_ms_time(data[1][0]) if data else 'None')
    logger.debug("Current End: %s", misc.format_ms_time(data[-1][0]) if data else 'None')

    new_data = exchange.get_ticker_history(pair=pair, tick_interval=tick_interval,
                                           since_ms=since_ms)
    data.extend(new_data)

    logger.debug("New Start: %s", misc.format_ms_time(data[0][0]))
    logger.debug("New End: %s", misc.format_ms_time(data[-1][0]))

    misc.file_dump_json(filename, data)
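
The since_ms values threaded through all of these examples are UTC epoch timestamps in milliseconds; converting a calendar date into that representation needs only the standard library, as in this small helper (a hypothetical name, not part of freqtrade):

import datetime

def date_to_since_ms(year: int, month: int, day: int) -> int:
    """Return the UTC epoch timestamp in milliseconds for midnight of the given date."""
    dt = datetime.datetime(year, month, day, tzinfo=datetime.timezone.utc)
    return int(dt.timestamp() * 1000)

# For example, to start the download at 2018-04-10 00:00:00 UTC:
since_ms = date_to_since_ms(2018, 4, 10)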