예제 #1
0
def test_parse_ticker_dataframe(ticker_history_list, caplog):
    columns = ['date', 'open', 'high', 'low', 'close', 'volume']

    caplog.set_level(logging.DEBUG)
    # Test file with BV data
    dataframe = parse_ticker_dataframe(ticker_history_list,
                                       '5m',
                                       fill_missing=True)
    assert dataframe.columns.tolist() == columns
    assert log_has('Parsing tickerlist to dataframe', caplog.record_tuples)
예제 #2
0
def test_ohlcv_fill_up_missing_data2(caplog):
    timeframe = '5m'
    ticks = [[
            1511686200000,  # 8:50:00
            8.794e-05,  # open
            8.948e-05,  # high
            8.794e-05,  # low
            8.88e-05,  # close
            2255,  # volume (in quote currency)
        ],
        [
            1511686500000,  # 8:55:00
            8.88e-05,
            8.942e-05,
            8.88e-05,
            8.893e-05,
            9911,
        ],
        [
            1511687100000,  # 9:05:00
            8.891e-05,
            8.893e-05,
            8.875e-05,
            8.877e-05,
            2251
        ],
        [
            1511687400000,  # 9:10:00
            8.877e-05,
            8.883e-05,
            8.895e-05,
            8.817e-05,
            123551
    ]
    ]

    # Generate test-data without filling missing
    data = parse_ticker_dataframe(ticks, timeframe, pair="UNITTEST/BTC", fill_missing=False)
    assert len(data) == 3
    caplog.set_level(logging.DEBUG)
    data2 = ohlcv_fill_up_missing_data(data, timeframe, "UNITTEST/BTC")
    assert len(data2) == 4
    # 3rd candle has been filled
    row = data2.loc[2, :]
    assert row['volume'] == 0
    # close shoult match close of previous candle
    assert row['close'] == data.loc[1, 'close']
    assert row['open'] == row['close']
    assert row['high'] == row['close']
    assert row['low'] == row['close']
    # Column names should not change
    assert (data.columns == data2.columns).all()

    assert log_has(f"Missing data fillup for UNITTEST/BTC: before: "
                   f"{len(data)} - after: {len(data2)}", caplog)
예제 #3
0
def test_populate_indicators(hyperopt) -> None:
    tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
    dataframes = hyperopt.strategy.tickerdata_to_dataframe(tickerlist)
    dataframe = hyperopt.custom_hyperopt.populate_indicators(
        dataframes['UNITTEST/BTC'], {'pair': 'UNITTEST/BTC'})

    # Check if some indicators are generated. We will not test all of them
    assert 'adx' in dataframe
    assert 'mfi' in dataframe
    assert 'rsi' in dataframe
예제 #4
0
def test_tickerdata_to_dataframe(default_conf) -> None:
    strategy = DefaultStrategy(default_conf)

    timerange = TimeRange(None, 'line', 0, -100)
    tick = load_tickerdata_file(None,
                                'UNITTEST/BTC',
                                '1m',
                                timerange=timerange)
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
    data = strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 99  # partial candle was removed
예제 #5
0
def test_common_datearray(default_conf) -> None:
    strategy = DefaultStrategy(default_conf)
    tick = load_tickerdata_file(None, 'UNITTEST/BTC', '1m')
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick)}
    dataframes = strategy.tickerdata_to_dataframe(tickerlist)

    dates = common_datearray(dataframes)

    assert dates.size == dataframes['UNITTEST/BTC']['date'].size
    assert dates[0] == dataframes['UNITTEST/BTC']['date'][0]
    assert dates[-1] == dataframes['UNITTEST/BTC']['date'][-1]
예제 #6
0
def test_populate_indicators(hyperopt, testdatadir) -> None:
    tick = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', pair="UNITTEST/BTC",
                                                         fill_missing=True)}
    dataframes = hyperopt.backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    dataframe = hyperopt.custom_hyperopt.populate_indicators(dataframes['UNITTEST/BTC'],
                                                             {'pair': 'UNITTEST/BTC'})

    # Check if some indicators are generated. We will not test all of them
    assert 'adx' in dataframe
    assert 'mfi' in dataframe
    assert 'rsi' in dataframe
예제 #7
0
def mocked_load_data(datadir,
                     pairs=[],
                     ticker_interval='0m',
                     refresh_pairs=False,
                     timerange=None,
                     exchange=None):
    tickerdata = history.load_tickerdata_file(datadir,
                                              'UNITTEST/BTC',
                                              '1m',
                                              timerange=timerange)
    pairdata = {'UNITTEST/BTC': parse_ticker_dataframe(tickerdata)}
    return pairdata
예제 #8
0
def load_pair_history(pair: str,
                      ticker_interval: str,
                      datadir: Optional[Path],
                      timerange: TimeRange = TimeRange(None, None, 0, 0),
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      fill_up_missing: bool = True) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
    :return: DataFrame with ohlcv data
    """

    # If the user force the refresh of pairs
    if refresh_pairs:
        if not exchange:
            raise OperationalException(
                "Exchange needs to be initialized when "
                "calling load_data with refresh_pairs=True")

        logger.info('Download data for pair and store them in %s', datadir)
        download_pair_history(datadir=datadir,
                              exchange=exchange,
                              pair=pair,
                              ticker_interval=ticker_interval,
                              timerange=timerange)

    pairdata = load_tickerdata_file(datadir,
                                    pair,
                                    ticker_interval,
                                    timerange=timerange)

    if pairdata:
        if timerange.starttype == 'date' and pairdata[0][
                0] > timerange.startts * 1000:
            logger.warning(
                'Missing data at start for pair %s, data starts at %s', pair,
                arrow.get(pairdata[0][0] //
                          1000).strftime('%Y-%m-%d %H:%M:%S'))
        if timerange.stoptype == 'date' and pairdata[-1][
                0] < timerange.stopts * 1000:
            logger.warning(
                'Missing data at end for pair %s, data ends at %s', pair,
                arrow.get(pairdata[-1][0] //
                          1000).strftime('%Y-%m-%d %H:%M:%S'))
        return parse_ticker_dataframe(pairdata, ticker_interval,
                                      fill_up_missing)
    else:
        logger.warning(
            'No data for pair: "%s", Interval: %s. '
            'Use --refresh-pairs-cached to download the data', pair,
            ticker_interval)
        return None
예제 #9
0
def mocked_load_data(datadir,
                     pairs=[],
                     timeframe='0m',
                     refresh_pairs=False,
                     timerange=None,
                     exchange=None,
                     *args,
                     **kwargs):
    hz = 0.1
    base = 0.001

    NEOBTC = [[
        ticker_start_time.shift(
            minutes=(x * ticker_interval_in_minute)).timestamp * 1000,
        math.sin(x * hz) / 1000 + base,
        math.sin(x * hz) / 1000 + base + 0.0001,
        math.sin(x * hz) / 1000 + base - 0.0001,
        math.sin(x * hz) / 1000 + base, 123.45
    ] for x in range(0, 500)]

    hz = 0.2
    base = 0.002
    LTCBTC = [[
        ticker_start_time.shift(
            minutes=(x * ticker_interval_in_minute)).timestamp * 1000,
        math.sin(x * hz) / 1000 + base,
        math.sin(x * hz) / 1000 + base + 0.0001,
        math.sin(x * hz) / 1000 + base - 0.0001,
        math.sin(x * hz) / 1000 + base, 123.45
    ] for x in range(0, 500)]

    pairdata = {
        'NEO/BTC':
        parse_ticker_dataframe(NEOBTC, '1h', pair="NEO/BTC",
                               fill_missing=True),
        'LTC/BTC':
        parse_ticker_dataframe(LTCBTC, '1h', pair="LTC/BTC", fill_missing=True)
    }
    return pairdata
예제 #10
0
def test_datesarray_to_datetimearray(ticker_history_list):
    dataframes = parse_ticker_dataframe(ticker_history_list)
    dates = datesarray_to_datetimearray(dataframes['date'])

    assert isinstance(dates[0], datetime.datetime)
    assert dates[0].year == 2017
    assert dates[0].month == 11
    assert dates[0].day == 26
    assert dates[0].hour == 8
    assert dates[0].minute == 50

    date_len = len(dates)
    assert date_len == 2
예제 #11
0
def load_pair_history(pair: str,
                      timeframe: str,
                      datadir: Path,
                      timerange: Optional[TimeRange] = None,
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True,
                      startup_candles: int = 0,
                      ) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
    :param pair: Pair to load data for
    :param timeframe: Ticker timeframe (e.g. "5m")
    :param datadir: Path to the data storage location.
    :param timerange: Limit data to be loaded to this timerange
    :param refresh_pairs: Refresh pairs from exchange.
        (Note: Requires exchange to be passed as well.)
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :param startup_candles: Additional candles to load at the start of the period
    :return: DataFrame with ohlcv data, or empty DataFrame
    """

    timerange_startup = deepcopy(timerange)
    if startup_candles > 0 and timerange_startup:
        timerange_startup.subtract_start(timeframe_to_seconds(timeframe) * startup_candles)

    # The user forced the refresh of pairs
    if refresh_pairs:
        download_pair_history(datadir=datadir,
                              exchange=exchange,
                              pair=pair,
                              timeframe=timeframe,
                              timerange=timerange)

    pairdata = load_tickerdata_file(datadir, pair, timeframe, timerange=timerange_startup)

    if pairdata:
        if timerange_startup:
            _validate_pairdata(pair, pairdata, timerange_startup)
        return parse_ticker_dataframe(pairdata, timeframe, pair=pair,
                                      fill_missing=fill_up_missing,
                                      drop_incomplete=drop_incomplete)
    else:
        logger.warning(
            f'No history data for pair: "{pair}", timeframe: {timeframe}. '
            'Use `freqtrade download-data` to download the data'
        )
        return DataFrame()
예제 #12
0
def load_data_test(what):
    timerange = TimeRange(None, 'line', 0, -101)
    pair = history.load_tickerdata_file(None,
                                        ticker_interval='1m',
                                        pair='UNITTEST/BTC',
                                        timerange=timerange)
    datalen = len(pair)

    base = 0.001
    if what == 'raise':
        data = [
            [
                pair[x][0],  # Keep old dates
                x * base,  # But replace O,H,L,C
                x * base + 0.0001,
                x * base - 0.0001,
                x * base,
                pair[x][5],  # Keep old volume
            ] for x in range(0, datalen)
        ]
    if what == 'lower':
        data = [
            [
                pair[x][0],  # Keep old dates
                1 - x * base,  # But replace O,H,L,C
                1 - x * base + 0.0001,
                1 - x * base - 0.0001,
                1 - x * base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
    if what == 'sine':
        hz = 0.1  # frequency
        data = [
            [
                pair[x][0],  # Keep old dates
                math.sin(x * hz) / 1000 + base,  # But replace O,H,L,C
                math.sin(x * hz) / 1000 + base + 0.0001,
                math.sin(x * hz) / 1000 + base - 0.0001,
                math.sin(x * hz) / 1000 + base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
    return {
        'UNITTEST/BTC':
        parse_ticker_dataframe(data,
                               '1m',
                               pair="UNITTEST/BTC",
                               fill_missing=True)
    }
예제 #13
0
def test_datesarray_to_datetimearray(ticker_history_list):
    dataframes = parse_ticker_dataframe(ticker_history_list, "5m", pair="UNITTEST/BTC",
                                        fill_missing=True)
    dates = datesarray_to_datetimearray(dataframes['date'])

    assert isinstance(dates[0], datetime.datetime)
    assert dates[0].year == 2017
    assert dates[0].month == 11
    assert dates[0].day == 26
    assert dates[0].hour == 8
    assert dates[0].minute == 50

    date_len = len(dates)
    assert date_len == 2
예제 #14
0
def test_tickerdata_to_dataframe_bt(default_conf, mocker) -> None:
    patch_exchange(mocker)
    timerange = TimeRange(None, 'line', 0, -100)
    tick = history.load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', fill_missing=True)}

    backtesting = Backtesting(default_conf)
    data = backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 102

    # Load strategy to compare the result between Backtesting function and strategy are the same
    strategy = DefaultStrategy(default_conf)
    data2 = strategy.tickerdata_to_dataframe(tickerlist)
    assert data['UNITTEST/BTC'].equals(data2['UNITTEST/BTC'])
예제 #15
0
    def refresh_latest_ohlcv(
            self, pair_list: List[Tuple[str, str]]) -> List[Tuple[str, List]]:
        """
        Refresh in-memory ohlcv asynchronously and set `_klines` with the result
        Loops asynchronously over pair_list and downloads all pairs async (semi-parallel).
        :param pair_list: List of 2 element tuples containing pair, interval to refresh
        :return: Returns a List of ticker-dataframes.
        """
        logger.debug("Refreshing ohlcv data for %d pairs", len(pair_list))

        input_coroutines = []

        # Gather coroutines to run
        for pair, timeframe in set(pair_list):
            if (not ((pair, timeframe) in self._klines)
                    or self._now_is_time_to_refresh(pair, timeframe)):
                input_coroutines.append(
                    self._async_get_candle_history(pair, timeframe))
            else:
                logger.debug(
                    "Using cached ohlcv data for pair %s, timeframe %s ...",
                    pair, timeframe)

        tickers = asyncio.get_event_loop().run_until_complete(
            asyncio.gather(*input_coroutines, return_exceptions=True))

        # handle caching
        for res in tickers:
            if isinstance(res, Exception):
                logger.warning("Async code raised an exception: %s",
                               res.__class__.__name__)
                continue
            pair = res[0]
            timeframe = res[1]
            ticks = res[2]
            # keeping last candle time as last refreshed time of the pair
            if ticks:
                self._pairs_last_refresh_time[(
                    pair, timeframe)] = ticks[-1][0] // 1000
            # keeping parsed dataframe in cache
            self._klines[(pair, timeframe)] = parse_ticker_dataframe(
                ticks,
                timeframe,
                pair=pair,
                fill_missing=True,
                drop_incomplete=self._ohlcv_partial_candle)
        return tickers
예제 #16
0
def test_tickerdata_to_dataframe(default_conf, testdatadir) -> None:
    strategy = DefaultStrategy(default_conf)

    timerange = TimeRange.parse_timerange('1510694220-1510700340')
    tick = load_tickerdata_file(testdatadir,
                                'UNITTEST/BTC',
                                '1m',
                                timerange=timerange)
    tickerlist = {
        'UNITTEST/BTC':
        parse_ticker_dataframe(tick,
                               '1m',
                               pair="UNITTEST/BTC",
                               fill_missing=True)
    }
    data = strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 102  # partial candle was removed
예제 #17
0
def load_pair_history(pair: str,
                      ticker_interval: str,
                      datadir: Path,
                      timerange: Optional[TimeRange] = None,
                      refresh_pairs: bool = False,
                      exchange: Optional[Exchange] = None,
                      fill_up_missing: bool = True,
                      drop_incomplete: bool = True
                      ) -> DataFrame:
    """
    Loads cached ticker history for the given pair.
    :param pair: Pair to load data for
    :param ticker_interval: Ticker-interval (e.g. "5m")
    :param datadir: Path to the data storage location.
    :param timerange: Limit data to be loaded to this timerange
    :param refresh_pairs: Refresh pairs from exchange.
        (Note: Requires exchange to be passed as well.)
    :param exchange: Exchange object (needed when using "refresh_pairs")
    :param fill_up_missing: Fill missing values with "No action"-candles
    :param drop_incomplete: Drop last candle assuming it may be incomplete.
    :return: DataFrame with ohlcv data
    """

    # The user forced the refresh of pairs
    if refresh_pairs:
        download_pair_history(datadir=datadir,
                              exchange=exchange,
                              pair=pair,
                              ticker_interval=ticker_interval,
                              timerange=timerange)

    pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)

    if pairdata:
        if timerange:
            _validate_pairdata(pair, pairdata, timerange)
        return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair,
                                      fill_missing=fill_up_missing,
                                      drop_incomplete=drop_incomplete)
    else:
        logger.warning(
            f'No history data for pair: "{pair}", interval: {ticker_interval}. '
            'Use `freqtrade download-data` to download the data'
        )
        return None
예제 #18
0
    def refresh_latest_ohlcv(
            self, pair_list: List[Tuple[str, str]]) -> List[Tuple[str, List]]:
        """
        Refresh in-memory ohlcv asyncronously and set `_klines` with the result
        """
        logger.debug("Refreshing ohlcv data for %d pairs", len(pair_list))

        input_coroutines = []

        # Gather corotines to run
        for pair, ticker_interval in set(pair_list):
            # Calculating ticker interval in second
            interval_in_sec = constants.TICKER_INTERVAL_MINUTES[
                ticker_interval] * 60

            if not ((self._pairs_last_refresh_time.get(
                (pair, ticker_interval), 0) + interval_in_sec) >=
                    arrow.utcnow().timestamp and
                    (pair, ticker_interval) in self._klines):
                input_coroutines.append(
                    self._async_get_candle_history(pair, ticker_interval))
            else:
                logger.debug("Using cached ohlcv data for %s, %s ...", pair,
                             ticker_interval)

        tickers = asyncio.get_event_loop().run_until_complete(
            asyncio.gather(*input_coroutines, return_exceptions=True))

        # handle caching
        for res in tickers:
            if isinstance(res, Exception):
                logger.warning("Async code raised an exception: %s",
                               res.__class__.__name__)
                continue
            pair = res[0]
            tick_interval = res[1]
            ticks = res[2]
            # keeping last candle time as last refreshed time of the pair
            if ticks:
                self._pairs_last_refresh_time[(
                    pair, tick_interval)] = ticks[-1][0] // 1000
            # keeping parsed dataframe in cache
            self._klines[(pair, tick_interval)] = parse_ticker_dataframe(
                ticks, tick_interval, fill_missing=True)
        return tickers
예제 #19
0
def mocked_load_data(datadir,
                     pairs=[],
                     timeframe='0m',
                     timerange=None,
                     *args,
                     **kwargs):
    tickerdata = history.load_tickerdata_file(datadir,
                                              'UNITTEST/BTC',
                                              '1m',
                                              timerange=timerange)
    pairdata = {
        'UNITTEST/BTC':
        parse_ticker_dataframe(tickerdata,
                               '1m',
                               pair="UNITTEST/BTC",
                               fill_missing=True)
    }
    return pairdata
예제 #20
0
    def update_plots(pairs, timeframe):
        if pairs is not None:
            exchange = self.exchange
            # Get ticker datas
            if exchange.has['fetchOHLCV']:
                plots = []
                plot_counter = 0
                for pair in pairs:
                    plot_counter += 1
                    plot_id = f'plot_id_{plot_counter}'
                    tickers = exchange.fetch_ohlcv(pair, timeframe)
                    df = parse_ticker_dataframe(tickers, timeframe)
                    figure = get_plot_fig(df, pair)
                    figure['layout']['height'] = 800
                    plots.append(get_plot_component(df, figure, plot_id))
                    self.plot_ids.update({pair: plot_id})

                print("plot IDs : ")
                print(self.plot_ids)
            return html.Div(plots)
예제 #21
0
def test_buy_strategy_generator(hyperopt, testdatadir) -> None:
    tick = load_tickerdata_file(testdatadir, 'UNITTEST/BTC', '1m')
    tickerlist = {
        'UNITTEST/BTC':
        parse_ticker_dataframe(tick,
                               '1m',
                               pair="UNITTEST/BTC",
                               fill_missing=True)
    }
    dataframes = hyperopt.backtesting.strategy.tickerdata_to_dataframe(
        tickerlist)
    dataframe = hyperopt.custom_hyperopt.populate_indicators(
        dataframes['UNITTEST/BTC'], {'pair': 'UNITTEST/BTC'})

    populate_buy_trend = hyperopt.custom_hyperopt.buy_strategy_generator({
        'adx-value':
        20,
        'fastd-value':
        20,
        'mfi-value':
        20,
        'rsi-value':
        20,
        'adx-enabled':
        True,
        'fastd-enabled':
        True,
        'mfi-enabled':
        True,
        'rsi-enabled':
        True,
        'trigger':
        'bb_lower'
    })
    result = populate_buy_trend(dataframe, {'pair': 'UNITTEST/BTC'})
    # Check if some indicators are generated. We will not test all of them
    assert 'buy' in result
    assert 1 in result['buy']
예제 #22
0
def parse_contents(contents, filename, date):
    content_type, content_string = contents.split(',')
    decoded = base64.b64decode(content_string)

    try:
        if 'json' in filename:
            # Assume that the user uploaded a JSON file
            timeframe = re.search('-(.*).json', filename).group(1)

            df = pd.read_json(
                io.StringIO(decoded.decode('utf-8')))

            df = parse_ticker_dataframe(df.values, timeframe, False)
            df.set_index('date', inplace=True)

    except Exception as e:
        raise
        return html.Div([
            'There was an error processing this file.'
        ])
    plot_title = filename.replace("/", "_")
    file_obj = {'df': df, 'tf': timeframe, 'title': plot_title}
    return file_obj
예제 #23
0
def test_dataframe_correct_length(result):
    dataframe = parse_ticker_dataframe(result)
    assert len(result.index) - 1 == len(
        dataframe.index)  # last partial candle removed
예제 #24
0
파일: conftest.py 프로젝트: sprgn/freqtrade
def result():
    with open('freqtrade/tests/testdata/UNITTEST_BTC-1m.json') as data_file:
        return parse_ticker_dataframe(json.load(data_file), '1m', True)
예제 #25
0
파일: conftest.py 프로젝트: sprgn/freqtrade
def ticker_history(ticker_history_list):
    return parse_ticker_dataframe(ticker_history_list, "5m", True)
def result():
    with open('freqtrade/tests/testdata/ETH_BTC-1m.json') as data_file:
        return parse_ticker_dataframe(json.load(data_file), '1m', pair="UNITTEST/BTC",
                                      fill_missing=True)
예제 #27
0
def ticker_history(ticker_history_list):
    return parse_ticker_dataframe(ticker_history_list,
                                  "5m",
                                  pair="UNITTEST/BTC",
                                  fill_missing=True)
예제 #28
0
def result(testdatadir):
    with (testdatadir / 'UNITTEST_BTC-1m.json').open('r') as data_file:
        return parse_ticker_dataframe(json.load(data_file),
                                      '1m',
                                      pair="UNITTEST/BTC",
                                      fill_missing=True)
예제 #29
0
def _download_pair_history(datadir: Path,
                           exchange: Exchange,
                           pair: str, *,
                           timeframe: str = '5m',
                           timerange: Optional[TimeRange] = None,
                           data_handler: IDataHandler = None) -> bool:
    """
    Download latest candles from the exchange for the pair and timeframe passed in parameters
    The data is downloaded starting from the last correct data that
    exists in a cache. If timerange starts earlier than the data in the cache,
    the full data will be redownloaded

    Based on @Rybolov work: https://github.com/rybolov/freqtrade-data

    :param pair: pair to download
    :param timeframe: Ticker Timeframe (e.g 5m)
    :param timerange: range of time to download
    :return: bool with success state
    """
    data_handler = get_datahandler(datadir, data_handler=data_handler)

    try:
        logger.info(
            f'Download history data for pair: "{pair}", timeframe: {timeframe} '
            f'and store in {datadir}.'
        )

        # data, since_ms = _load_cached_data_for_updating_old(datadir, pair, timeframe, timerange)
        data, since_ms = _load_cached_data_for_updating(pair, timeframe, timerange,
                                                        data_handler=data_handler)

        logger.debug("Current Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("Current End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        # Default since_ms to 30 days if nothing is given
        new_data = exchange.get_historic_ohlcv(pair=pair,
                                               timeframe=timeframe,
                                               since_ms=since_ms if since_ms else
                                               int(arrow.utcnow().shift(
                                                   days=-30).float_timestamp) * 1000
                                               )
        # TODO: Maybe move parsing to exchange class (?)
        new_dataframe = parse_ticker_dataframe(new_data, timeframe, pair,
                                               fill_missing=False, drop_incomplete=True)
        if data.empty:
            data = new_dataframe
        else:
            data = data.append(new_dataframe)

        logger.debug("New  Start: %s",
                     f"{data.iloc[0]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')
        logger.debug("New End: %s",
                     f"{data.iloc[-1]['date']:%Y-%m-%d %H:%M:%S}" if not data.empty else 'None')

        data_handler.ohlcv_store(pair, timeframe, data=data)
        return True

    except Exception as e:
        logger.error(
            f'Failed to download history data for pair: "{pair}", timeframe: {timeframe}. '
            f'Error: {e}'
        )
        return False
예제 #30
0
def result():
    with open('freqtrade/tests/testdata/ETH_BTC-1m.json') as data_file:
        return parse_ticker_dataframe(json.load(data_file))