def test_load_partial_missing(caplog) -> None:
    """Verify load_data() logs a warning when cached data does not cover the requested range."""
    # Make sure we start fresh - test missing data at start
    start = arrow.get('2018-01-01T00:00:00')
    end = arrow.get('2018-01-11T00:00:00')
    tickerdata = history.load_data(None, '5m', ['UNITTEST/BTC'],
                                   refresh_pairs=False,
                                   timerange=TimeRange('date', 'date',
                                                       start.timestamp, end.timestamp))
    # timedifference in 5 minutes
    td = ((end - start).total_seconds() // 60 // 5) + 1
    # Cached data is shorter than the requested window
    assert td != len(tickerdata['UNITTEST/BTC'])
    start_real = tickerdata['UNITTEST/BTC'].iloc[0, 0]
    assert log_has(f'Missing data at start for pair '
                   f'UNITTEST/BTC, data starts at {start_real.strftime("%Y-%m-%d %H:%M:%S")}',
                   caplog.record_tuples)
    # Make sure we start fresh - test missing data at end
    caplog.clear()
    start = arrow.get('2018-01-10T00:00:00')
    end = arrow.get('2018-02-20T00:00:00')
    tickerdata = history.load_data(datadir=None, ticker_interval='5m',
                                   pairs=['UNITTEST/BTC'], refresh_pairs=False,
                                   timerange=TimeRange('date', 'date',
                                                       start.timestamp, end.timestamp))
    # timedifference in 5 minutes
    td = ((end - start).total_seconds() // 60 // 5) + 1
    assert td != len(tickerdata['UNITTEST/BTC'])
    # Shift endtime with +5 - as last candle is dropped (partial candle)
    end_real = arrow.get(tickerdata['UNITTEST/BTC'].iloc[-1, 0]).shift(minutes=5)
    assert log_has(f'Missing data at end for pair '
                   f'UNITTEST/BTC, data ends at {end_real.strftime("%Y-%m-%d %H:%M:%S")}',
                   caplog.record_tuples)
def test_parse_timerange_incorrect() -> None:
    """Check Arguments.parse_timerange() against every supported syntax variant."""
    # Line-count based ranges
    assert Arguments.parse_timerange('-200') == TimeRange(None, 'line', 0, -200)
    assert Arguments.parse_timerange('200-') == TimeRange('line', None, 200, 0)
    # Index-based window
    assert Arguments.parse_timerange('200-500') == TimeRange('index', 'index', 200, 500)
    # Date-based (YYYYMMDD) ranges
    assert Arguments.parse_timerange('20100522-') == TimeRange('date', None, 1274486400, 0)
    assert Arguments.parse_timerange('-20100522') == TimeRange(None, 'date', 0, 1274486400)
    parsed = Arguments.parse_timerange('20100522-20150730')
    assert parsed == TimeRange('date', 'date', 1274486400, 1438214400)

    # Added test for unix timestamp - BTC genesis date
    assert Arguments.parse_timerange('1231006505-') == TimeRange('date', None, 1231006505, 0)
    assert Arguments.parse_timerange('-1233360000') == TimeRange(None, 'date', 0, 1233360000)
    parsed = Arguments.parse_timerange('1231006505-1233360000')
    assert parsed == TimeRange('date', 'date', 1231006505, 1233360000)

    # TODO: Find solution for the following case (passing timestamp in ms)
    parsed = Arguments.parse_timerange('1231006505000-1233360000000')
    assert parsed != TimeRange('date', 'date', 1231006505, 1233360000)

    # A bare separator is rejected
    with pytest.raises(Exception, match=r'Incorrect syntax.*'):
        Arguments.parse_timerange('-')
def load_data(datadir: str,
              ticker_interval: str,
              pairs: List[str],
              refresh_pairs: Optional[bool] = False,
              exchange: Optional[Exchange] = None,
              timerange: Optional[TimeRange] = None) -> Dict[str, List]:
    """
    Loads ticker history data for the given parameters.

    :param datadir: Directory holding the cached ticker data
    :param ticker_interval: Candle interval to load (e.g. "5m")
    :param pairs: List of pairs to load
    :param refresh_pairs: Download fresh data before loading (requires exchange)
    :param exchange: Exchange object, only needed with refresh_pairs=True
    :param timerange: Limit the data to this range; defaults to "no limit"
    :return: dict of <pair>: <tickerlist>; pairs without data are omitted
    :raises OperationalException: if refresh_pairs is set without an exchange
    """
    # The original signature used a shared TimeRange instance as default
    # argument (mutable-default pitfall); build a fresh one per call instead.
    if timerange is None:
        timerange = TimeRange(None, None, 0, 0)
    result = {}
    # If the user force the refresh of pairs
    if refresh_pairs:
        logger.info('Download data for all pairs and store them in %s', datadir)
        if not exchange:
            raise OperationalException("Exchange needs to be initialized when "
                                       "calling load_data with refresh_pairs=True")
        download_pairs(datadir, exchange, pairs, ticker_interval, timerange=timerange)
    for pair in pairs:
        pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange)
        if pairdata:
            result[pair] = pairdata
        else:
            logger.warning(
                'No data for pair: "%s", Interval: %s. '
                'Use --refresh-pairs-cached to download the data',
                pair,
                ticker_interval
            )
    return result
def load_data_test(what):
    """
    Load UNITTEST/BTC 1m tickerdata and optionally replace the OHLC columns
    with a synthetic series selected by *what* ('raise', 'lower' or 'sine').
    Timestamps and volume of the original data are preserved; any other value
    of *what* returns the data unmodified.
    """
    timerange = TimeRange(None, 'line', 0, -101)
    data = optimize.load_data(None, ticker_interval='1m',
                              pairs=['UNITTEST/BTC'], timerange=timerange)
    pair = data['UNITTEST/BTC']
    datalen = len(pair)
    # Depending on the what parameter we now adjust the
    # loaded data looks:
    # pair :: [[ 1509836520000, unix timestamp in ms
    #            0.00162008,    open
    #            0.00162008,    high
    #            0.00162008,    low
    #            0.00162008,    close
    #            108.14853839   base volume
    #          ]]
    base = 0.001
    if what == 'raise':
        # Monotonically rising prices
        return {'UNITTEST/BTC': [
            [
                pair[x][0],  # Keep old dates
                x * base,  # But replace O,H,L,C
                x * base + 0.0001,
                x * base - 0.0001,
                x * base,
                pair[x][5],  # Keep old volume
            ] for x in range(0, datalen)
        ]}
    if what == 'lower':
        # Monotonically falling prices
        return {'UNITTEST/BTC': [
            [
                pair[x][0],  # Keep old dates
                1 - x * base,  # But replace O,H,L,C
                1 - x * base + 0.0001,
                1 - x * base - 0.0001,
                1 - x * base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]}
    if what == 'sine':
        hz = 0.1  # frequency
        # Oscillating prices
        return {'UNITTEST/BTC': [
            [
                pair[x][0],  # Keep old dates
                math.sin(x * hz) / 1000 + base,  # But replace O,H,L,C
                math.sin(x * hz) / 1000 + base + 0.0001,
                math.sin(x * hz) / 1000 + base - 0.0001,
                math.sin(x * hz) / 1000 + base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]}
    return data
def load_pair_history(pair: str, ticker_interval: str, datadir: Optional[Path], timerange: TimeRange = TimeRange(None, None, 0, 0), refresh_pairs: bool = False, exchange: Optional[Exchange] = None, fill_up_missing: bool = True, drop_incomplete: bool = True) -> DataFrame: """ Loads cached ticker history for the given pair. :param pair: Pair to load data for :param ticker_interval: Ticker-interval (e.g. "5m") :param datadir: Path to the data storage location. :param timerange: Limit data to be loaded to this timerange :param refresh_pairs: Refresh pairs from exchange. (Note: Requires exchange to be passed as well.) :param exchange: Exchange object (needed when using "refresh_pairs") :param fill_up_missing: Fill missing values with "No action"-candles :param drop_incomplete: Drop last candle assuming it may be incomplete. :return: DataFrame with ohlcv data """ # The user forced the refresh of pairs if refresh_pairs: download_pair_history(datadir=datadir, exchange=exchange, pair=pair, ticker_interval=ticker_interval, timerange=timerange) pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange) if pairdata: if timerange and timerange.starttype == 'date' and pairdata[0][ 0] > timerange.startts * 1000: logger.warning( 'Missing data at start for pair %s, data starts at %s', pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S')) if timerange and timerange.stoptype == 'date' and pairdata[-1][ 0] < timerange.stopts * 1000: logger.warning( 'Missing data at end for pair %s, data ends at %s', pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S')) return parse_ticker_dataframe(pairdata, ticker_interval, pair=pair, fill_missing=fill_up_missing, drop_incomplete=drop_incomplete) else: logger.warning( f'No history data for pair: "{pair}", interval: {ticker_interval}. ' 'Use --refresh-pairs-cached option or download_backtest_data.py ' 'script to download the data') return None
def test_backtest(default_conf, fee, mocker) -> None:
    """Run a full backtest over UNITTEST/BTC 5m data and pin the exact resulting trades."""
    mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
    patch_exchange(mocker)
    backtesting = Backtesting(default_conf)
    pair = 'UNITTEST/BTC'
    timerange = TimeRange(None, 'line', 0, -201)
    data = history.load_data(datadir=None, ticker_interval='5m',
                             pairs=['UNITTEST/BTC'], timerange=timerange)
    data_processed = backtesting.strategy.tickerdata_to_dataframe(data)
    min_date, max_date = get_timeframe(data_processed)
    results = backtesting.backtest(
        {
            'stake_amount': default_conf['stake_amount'],
            'processed': data_processed,
            'max_open_trades': 10,
            'position_stacking': False,
            'start_date': min_date,
            'end_date': max_date,
        }
    )
    assert not results.empty
    assert len(results) == 2

    # Expected trades for this fixture data (exact frame comparison below)
    expected = pd.DataFrame(
        {'pair': [pair, pair],
         'profit_percent': [0.0, 0.0],
         'profit_abs': [0.0, 0.0],
         'open_time': pd.to_datetime([Arrow(2018, 1, 29, 18, 40, 0).datetime,
                                      Arrow(2018, 1, 30, 3, 30, 0).datetime], utc=True),
         'close_time': pd.to_datetime([Arrow(2018, 1, 29, 22, 35, 0).datetime,
                                       Arrow(2018, 1, 30, 4, 10, 0).datetime], utc=True),
         'open_index': [78, 184],
         'close_index': [125, 192],
         'trade_duration': [235, 40],
         'open_at_end': [False, False],
         'open_rate': [0.104445, 0.10302485],
         'close_rate': [0.104969, 0.103541],
         'sell_reason': [SellType.ROI, SellType.ROI]
         })
    pd.testing.assert_frame_equal(results, expected)
    data_pair = data_processed[pair]
    for _, t in results.iterrows():
        ln = data_pair.loc[data_pair["date"] == t["open_time"]]
        # Check open trade rate alignes to open rate
        assert ln is not None
        assert round(ln.iloc[0]["open"], 6) == round(t["open_rate"], 6)
        # check close trade rate alignes to close rate or is between high and low
        ln = data_pair.loc[data_pair["date"] == t["close_time"]]
        assert (round(ln.iloc[0]["open"], 6) == round(t["close_rate"], 6) or
                round(ln.iloc[0]["low"], 6) < round(
                    t["close_rate"], 6) < round(ln.iloc[0]["high"], 6))
def test_tickerdata_to_dataframe(default_conf) -> None:
    """DefaultStrategy.tickerdata_to_dataframe() drops the trailing partial candle."""
    strat = DefaultStrategy(default_conf)

    last_100_lines = TimeRange(None, 'line', 0, -100)
    raw_ticks = load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=last_100_lines)
    frames = strat.tickerdata_to_dataframe({'UNITTEST/BTC': raw_ticks})
    # 100 candles requested; the incomplete last one is removed
    assert len(frames['UNITTEST/BTC']) == 99
def load_pair_history(pair: str, ticker_interval: str, datadir: Optional[Path], timerange: TimeRange = TimeRange(None, None, 0, 0), refresh_pairs: bool = False, exchange: Optional[Exchange] = None, fill_up_missing: bool = True) -> DataFrame: """ Loads cached ticker history for the given pair. :return: DataFrame with ohlcv data """ # If the user force the refresh of pairs if refresh_pairs: if not exchange: raise OperationalException( "Exchange needs to be initialized when " "calling load_data with refresh_pairs=True") logger.info('Download data for pair and store them in %s', datadir) download_pair_history(datadir=datadir, exchange=exchange, pair=pair, ticker_interval=ticker_interval, timerange=timerange) pairdata = load_tickerdata_file(datadir, pair, ticker_interval, timerange=timerange) if pairdata: if timerange.starttype == 'date' and pairdata[0][ 0] > timerange.startts * 1000: logger.warning( 'Missing data at start for pair %s, data starts at %s', pair, arrow.get(pairdata[0][0] // 1000).strftime('%Y-%m-%d %H:%M:%S')) if timerange.stoptype == 'date' and pairdata[-1][ 0] < timerange.stopts * 1000: logger.warning( 'Missing data at end for pair %s, data ends at %s', pair, arrow.get(pairdata[-1][0] // 1000).strftime('%Y-%m-%d %H:%M:%S')) return parse_ticker_dataframe(pairdata, ticker_interval, fill_up_missing) else: logger.warning( 'No data for pair: "%s", Interval: %s. ' 'Use --refresh-pairs-cached to download the data', pair, ticker_interval) return None
def test_tickerdata_to_dataframe(default_conf) -> None:
    """
    Test Analyze.tickerdata_to_dataframe() method
    """
    analyzer = Analyze(default_conf)

    last_100_lines = TimeRange(None, 'line', 0, -100)
    raw_ticks = load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=last_100_lines)
    frames = analyzer.tickerdata_to_dataframe({'UNITTEST/BTC': raw_ticks})
    # 100 candles requested; the incomplete last one is removed
    assert len(frames['UNITTEST/BTC']) == 99
def test_generate_graph_no_trades(default_conf, mocker):
    """generate_graph() with trades=None still plots candles, volume, signals and BB traces."""
    row_mock = mocker.patch('freqtrade.plot.plotting.generate_row',
                            MagicMock(side_effect=fig_generating_mock))
    trades_mock = mocker.patch('freqtrade.plot.plotting.plot_trades',
                               MagicMock(side_effect=fig_generating_mock))
    pair = 'UNITTEST/BTC'
    timerange = TimeRange(None, 'line', 0, -1000)
    data = history.load_pair_history(pair=pair, ticker_interval='1m',
                                     datadir=None, timerange=timerange)

    # Generate buy/sell signals and indicators
    strat = DefaultStrategy(default_conf)
    data = strat.analyze_ticker(data, {'pair': pair})

    indicators1 = []
    indicators2 = []
    fig = generate_graph(pair=pair, data=data, trades=None,
                         indicators1=indicators1, indicators2=indicators2)
    assert isinstance(fig, go.Figure)
    assert fig.layout.title.text == pair
    figure = fig.layout.figure

    assert len(figure.data) == 6
    # Candlesticks are plotted first
    candles = find_trace_in_fig_data(figure.data, "Price")
    assert isinstance(candles, go.Candlestick)

    volume = find_trace_in_fig_data(figure.data, "Volume")
    assert isinstance(volume, go.Bar)

    buy = find_trace_in_fig_data(figure.data, "buy")
    assert isinstance(buy, go.Scatter)
    # All buy-signals should be plotted
    assert int(data.buy.sum()) == len(buy.x)

    sell = find_trace_in_fig_data(figure.data, "sell")
    assert isinstance(sell, go.Scatter)
    # All sell-signals should be plotted
    assert int(data.sell.sum()) == len(sell.x)

    assert find_trace_in_fig_data(figure.data, "BB lower")
    assert find_trace_in_fig_data(figure.data, "BB upper")

    assert row_mock.call_count == 2
    assert trades_mock.call_count == 1
def load_data_test(what):
    """
    Load UNITTEST/BTC 1m tickerdata and replace the OHLC columns with a
    synthetic series selected by *what* ('raise', 'lower' or 'sine').
    Timestamps and volume of the original data are preserved.

    :param what: Name of the synthetic price shape to generate
    :return: dict of pair -> parsed DataFrame (gaps filled)
    :raises ValueError: if *what* is not one of the known shapes
    """
    timerange = TimeRange(None, 'line', 0, -101)
    pair = history.load_tickerdata_file(None, ticker_interval='1m',
                                        pair='UNITTEST/BTC', timerange=timerange)
    datalen = len(pair)

    base = 0.001
    if what == 'raise':
        # Monotonically rising prices
        data = [
            [
                pair[x][0],  # Keep old dates
                x * base,  # But replace O,H,L,C
                x * base + 0.0001,
                x * base - 0.0001,
                x * base,
                pair[x][5],  # Keep old volume
            ] for x in range(0, datalen)
        ]
    elif what == 'lower':
        # Monotonically falling prices
        data = [
            [
                pair[x][0],  # Keep old dates
                1 - x * base,  # But replace O,H,L,C
                1 - x * base + 0.0001,
                1 - x * base - 0.0001,
                1 - x * base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
    elif what == 'sine':
        hz = 0.1  # frequency
        # Oscillating prices
        data = [
            [
                pair[x][0],  # Keep old dates
                math.sin(x * hz) / 1000 + base,  # But replace O,H,L,C
                math.sin(x * hz) / 1000 + base + 0.0001,
                math.sin(x * hz) / 1000 + base - 0.0001,
                math.sin(x * hz) / 1000 + base,
                pair[x][5]  # Keep old volume
            ] for x in range(0, datalen)
        ]
    else:
        # Previously an unknown value left 'data' unbound and caused a
        # NameError below; fail early with a clear message instead.
        raise ValueError(f'load_data_test: unknown shape "{what}"')
    return {'UNITTEST/BTC': parse_ticker_dataframe(data, '1m', pair="UNITTEST/BTC",
                                                   fill_missing=True)}
def test_tickerdata_to_dataframe_bt(default_conf, mocker) -> None:
    """Backtesting's strategy and a fresh DefaultStrategy produce identical dataframes."""
    patch_exchange(mocker)
    timerange = TimeRange(None, 'line', 0, -100)
    tick = history.load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
    tickerlist = {'UNITTEST/BTC': parse_ticker_dataframe(tick, '1m', fill_missing=True)}
    backtesting = Backtesting(default_conf)
    data = backtesting.strategy.tickerdata_to_dataframe(tickerlist)
    assert len(data['UNITTEST/BTC']) == 102

    # Load strategy to compare the result between Backtesting function and strategy are the same
    strategy = DefaultStrategy(default_conf)
    data2 = strategy.tickerdata_to_dataframe(tickerlist)
    assert data['UNITTEST/BTC'].equals(data2['UNITTEST/BTC'])
def test_validate_backtest_data(default_conf, mocker, caplog) -> None:
    """validate_backtest_data() logs nothing when the selected window has no gaps."""
    patch_exchange(mocker)
    strategy = DefaultStrategy(default_conf)

    window = TimeRange('index', 'index', 200, 250)
    tickerdata = history.load_data(datadir=None, ticker_interval='5m',
                                   pairs=['UNITTEST/BTC'], timerange=window)
    data = strategy.tickerdata_to_dataframe(tickerdata)
    min_date, max_date = optimize.get_timeframe(data)
    caplog.clear()
    assert not optimize.validate_backtest_data(
        data, min_date, max_date, constants.TICKER_INTERVAL_MINUTES["5m"])
    assert len(caplog.record_tuples) == 0
def test_validate_backtest_data(default_conf, mocker, caplog) -> None:
    """validate_backtest_data() logs nothing for a gap-free window of candles."""
    patch_exchange(mocker)
    strategy = DefaultStrategy(default_conf)

    window = TimeRange('index', 'index', 200, 250)
    tickerdata = history.load_data(datadir=None, ticker_interval='5m',
                                   pairs=['UNITTEST/BTC'], timerange=window)
    data = strategy.tickerdata_to_dataframe(tickerdata)
    min_date, max_date = history.get_timeframe(data)
    caplog.clear()
    assert not history.validate_backtest_data(
        data['UNITTEST/BTC'], 'UNITTEST/BTC', min_date, max_date,
        timeframe_to_minutes('5m'))
    assert len(caplog.record_tuples) == 0
def test_generate_row(default_conf, caplog):
    """generate_row() adds indicator traces to the requested subplot row and skips unknowns."""
    pair = "UNITTEST/BTC"
    timerange = TimeRange(None, 'line', 0, -1000)
    data = history.load_pair_history(pair=pair, ticker_interval='1m',
                                     datadir=None, timerange=timerange)
    indicators1 = ["ema10"]
    indicators2 = ["macd"]

    # Generate buy/sell signals and indicators
    strat = DefaultStrategy(default_conf)
    data = strat.analyze_ticker(data, {'pair': pair})
    fig = generage_empty_figure()

    # Row 1
    fig1 = generate_row(fig=deepcopy(fig), row=1, indicators=indicators1, data=data)
    figure = fig1.layout.figure
    ema10 = find_trace_in_fig_data(figure.data, "ema10")
    assert isinstance(ema10, go.Scatter)
    assert ema10.yaxis == "y"

    # Row 3 maps to the "y3" axis
    fig2 = generate_row(fig=deepcopy(fig), row=3, indicators=indicators2, data=data)
    figure = fig2.layout.figure
    macd = find_trace_in_fig_data(figure.data, "macd")
    assert isinstance(macd, go.Scatter)
    assert macd.yaxis == "y3"

    # No indicator found
    fig3 = generate_row(fig=deepcopy(fig), row=3, indicators=['no_indicator'], data=data)
    assert fig == fig3
    assert log_has_re(r'Indicator "no_indicator" ignored\..*', caplog.record_tuples)
def download_pairs(datadir, exchange: Exchange, pairs: List[str],
                   ticker_interval: str,
                   timerange: TimeRange = TimeRange(None, None, 0, 0)) -> bool:
    """
    For each pair passed in parameters, download the ticker intervals.

    :param datadir: Directory the downloaded data is stored in
    :param exchange: Exchange object used for downloading
    :param pairs: List of pairs to download
    :param ticker_interval: Candle interval (e.g. "5m")
    :param timerange: Limit the download to this range
    :return: True if all pairs downloaded, False after the first failure
    """
    for pair in pairs:
        try:
            download_backtesting_testdata(datadir,
                                          exchange=exchange,
                                          pair=pair,
                                          tick_interval=ticker_interval,
                                          timerange=timerange)
        except Exception:
            # Was `except BaseException`, which also swallowed
            # KeyboardInterrupt/SystemExit; catch only real errors.
            logger.info(
                'Failed to download the pair: "%s", Interval: %s',
                pair,
                ticker_interval
            )
            return False
    return True
def test_tickerdata_to_dataframe(default_conf, mocker) -> None:
    """
    Test Backtesting.tickerdata_to_dataframe() method
    """
    patch_exchange(mocker)
    timerange = TimeRange(None, 'line', 0, -100)
    tick = optimize.load_tickerdata_file(None, 'UNITTEST/BTC', '1m', timerange=timerange)
    tickerlist = {'UNITTEST/BTC': tick}

    backtesting = Backtesting(default_conf)
    data = backtesting.tickerdata_to_dataframe(tickerlist)
    # 100 lines loaded; the trailing partial candle is dropped
    assert len(data['UNITTEST/BTC']) == 99

    # Load Analyze to compare the result between Backtesting function and Analyze are the same
    analyze = Analyze(default_conf)
    data2 = analyze.tickerdata_to_dataframe(tickerlist)
    assert data['UNITTEST/BTC'].equals(data2['UNITTEST/BTC'])
def load_data(datadir: Optional[Path],
              ticker_interval: str,
              pairs: List[str],
              refresh_pairs: bool = False,
              exchange: Optional[Exchange] = None,
              timerange: TimeRange = TimeRange(None, None, 0, 0)) -> Dict[str, DataFrame]:
    """
    Load cached ticker history for every requested pair.

    Delegates the per-pair work to load_pair_history(); pairs for which no
    data could be loaded are left out of the result.

    :return: dict(<pair>:<tickerlist>)
    """
    result = {}
    for pair_name in pairs:
        dataframe = load_pair_history(pair=pair_name,
                                      ticker_interval=ticker_interval,
                                      datadir=datadir,
                                      timerange=timerange,
                                      refresh_pairs=refresh_pairs,
                                      exchange=exchange)
        if dataframe is not None:
            result[pair_name] = dataframe
    return result
def test_extract_trades_of_period():
    """extract_trades_of_period() keeps only trades inside the data's time window."""
    pair = "UNITTEST/BTC"
    timerange = TimeRange(None, 'line', 0, -1000)

    data = load_pair_history(pair=pair, ticker_interval='1m',
                             datadir=None, timerange=timerange)

    # timerange = 2017-11-14 06:07 - 2017-11-14 22:58:00
    trades = DataFrame(
        {'pair': [pair, pair, pair, pair],
         'profit_percent': [0.0, 0.1, -0.2, -0.5],
         'profit_abs': [0.0, 1, -2, -5],
         'open_time': to_datetime([Arrow(2017, 11, 13, 15, 40, 0).datetime,
                                   Arrow(2017, 11, 14, 9, 41, 0).datetime,
                                   Arrow(2017, 11, 14, 14, 20, 0).datetime,
                                   Arrow(2017, 11, 15, 3, 40, 0).datetime,
                                   ], utc=True),
         'close_time': to_datetime([Arrow(2017, 11, 13, 16, 40, 0).datetime,
                                    Arrow(2017, 11, 14, 10, 41, 0).datetime,
                                    Arrow(2017, 11, 14, 15, 25, 0).datetime,
                                    Arrow(2017, 11, 15, 3, 55, 0).datetime,
                                    ], utc=True)
         })
    trades1 = extract_trades_of_period(data, trades)
    # First and last trade are dropped as they are out of range
    assert len(trades1) == 2
    assert trades1.iloc[0].open_time == Arrow(2017, 11, 14, 9, 41, 0).datetime
    assert trades1.iloc[0].close_time == Arrow(2017, 11, 14, 10, 41, 0).datetime
    assert trades1.iloc[-1].open_time == Arrow(2017, 11, 14, 14, 20, 0).datetime
    assert trades1.iloc[-1].close_time == Arrow(2017, 11, 14, 15, 25, 0).datetime
def test_generate_graph_no_signals_no_trades(default_conf, mocker, caplog):
    """Without buy/sell signals generate_graph() plots only candles and volume, and logs it."""
    row_mock = mocker.patch('freqtrade.plot.plotting.generate_row',
                            MagicMock(side_effect=fig_generating_mock))
    trades_mock = mocker.patch('freqtrade.plot.plotting.plot_trades',
                               MagicMock(side_effect=fig_generating_mock))
    pair = "UNITTEST/BTC"
    timerange = TimeRange(None, 'line', 0, -1000)
    data = history.load_pair_history(pair=pair, ticker_interval='1m',
                                     datadir=None, timerange=timerange)
    # Zero out all signals
    data['buy'] = 0
    data['sell'] = 0

    indicators1 = []
    indicators2 = []
    fig = generate_graph(pair=pair, data=data, trades=None,
                         indicators1=indicators1, indicators2=indicators2)
    assert isinstance(fig, go.Figure)
    assert fig.layout.title.text == pair
    figure = fig.layout.figure

    assert len(figure.data) == 2
    # Candlesticks are plotted first
    candles = find_trace_in_fig_data(figure.data, "Price")
    assert isinstance(candles, go.Candlestick)

    volume = find_trace_in_fig_data(figure.data, "Volume")
    assert isinstance(volume, go.Bar)

    assert row_mock.call_count == 2
    assert trades_mock.call_count == 1

    assert log_has("No buy-signals found.", caplog.record_tuples)
    assert log_has("No sell-signals found.", caplog.record_tuples)
def load_data(datadir: Optional[Path],
              ticker_interval: str,
              pairs: List[str],
              refresh_pairs: bool = False,
              exchange: Optional[Exchange] = None,
              timerange: TimeRange = TimeRange(None, None, 0, 0),
              fill_up_missing: bool = True,
              live: bool = False) -> Dict[str, DataFrame]:
    """
    Loads ticker history data for a list of pairs the given parameters

    :param datadir: Path to the data storage location
    :param ticker_interval: Candle interval (e.g. "5m")
    :param pairs: List of pairs to load
    :param refresh_pairs: Download fresh data first (requires exchange)
    :param exchange: Exchange object (needed for refresh_pairs / live)
    :param timerange: Limit loaded data to this timerange
    :param fill_up_missing: Fill gaps with "no action" candles
    :param live: Fetch latest candles from the exchange instead of local files
    :return: dict(<pair>:<tickerlist>)
    :raises OperationalException: if live is set without an exchange
    """
    result: Dict[str, DataFrame] = {}

    if live:
        if exchange:
            logger.info('Live: Downloading data for all defined pairs ...')
            exchange.refresh_latest_ohlcv([(pair, ticker_interval) for pair in pairs])
            # exchange._klines is keyed by (pair, interval) tuples; drop empty entries
            result = {key[0]: value for key, value in exchange._klines.items()
                      if value is not None}
        else:
            raise OperationalException("Exchange needs to be initialized when using live data.")
    else:
        logger.info('Using local backtesting data ...')
        for pair in pairs:
            hist = load_pair_history(pair=pair, ticker_interval=ticker_interval,
                                     datadir=datadir, timerange=timerange,
                                     refresh_pairs=refresh_pairs,
                                     exchange=exchange,
                                     fill_up_missing=fill_up_missing)
            if hist is not None:
                result[pair] = hist
    return result
def test_backtest_1min_ticker_interval(default_conf, fee, mocker) -> None:
    """Backtesting over existing 1m test data yields exactly one trade."""
    mocker.patch('freqtrade.exchange.Exchange.get_fee', fee)
    patch_exchange(mocker)
    backtesting = Backtesting(default_conf)

    # Run a backtesting for an exiting 1min ticker_interval
    last_200_lines = TimeRange(None, 'line', 0, -200)
    raw_data = history.load_data(datadir=None, ticker_interval='1m',
                                 pairs=['UNITTEST/BTC'], timerange=last_200_lines)
    frames = backtesting.strategy.tickerdata_to_dataframe(raw_data)
    min_date, max_date = get_timeframe(frames)
    results = backtesting.backtest(
        {
            'stake_amount': default_conf['stake_amount'],
            'processed': frames,
            'max_open_trades': 1,
            'position_stacking': False,
            'start_date': min_date,
            'end_date': max_date,
        }
    )
    assert not results.empty
    assert len(results) == 1
def test_trim_tickerlist() -> None:
    """Exercise trim_tickerlist() with every supported TimeRange pattern."""
    file = os.path.join(os.path.dirname(__file__), '..', 'testdata', 'UNITTEST_BTC-1m.json')
    with open(file) as data_file:
        ticker_list = json.load(data_file)
    ticker_list_len = len(ticker_list)

    # Test the pattern ^(-\d+)$
    # This pattern uses the latest N elements
    timerange = TimeRange(None, 'line', 0, -5)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is not ticker[0]  # The first element should be different
    assert ticker_list[-1] is ticker[-1]  # The last element must be the same

    # Test the pattern ^(\d+)-$
    # This pattern keep X element from the end
    timerange = TimeRange('line', None, 5, 0)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is ticker[0]  # The first element must be the same
    assert ticker_list[-1] is not ticker[-1]  # The last element should be different

    # Test the pattern ^(\d+)-(\d+)$
    # This pattern extract a window
    timerange = TimeRange('index', 'index', 5, 10)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is not ticker[0]  # The first element should be different
    assert ticker_list[5] is ticker[0]  # The list starts at the index 5
    assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)

    # Test the pattern ^(\d{8})-(\d{8})$
    # This pattern extract a window between the dates
    timerange = TimeRange('date', 'date', ticker_list[5][0] / 1000,
                          ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 5
    assert ticker_list[0] is not ticker[0]  # The first element should be different
    assert ticker_list[5] is ticker[0]  # The list starts at the index 5
    assert ticker_list[9] is ticker[-1]  # The list ends at the index 9 (5 elements)

    # Test the pattern ^-(\d{8})$
    # This pattern extracts elements from the start to the date
    timerange = TimeRange(None, 'date', 0, ticker_list[10][0] / 1000 - 1)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == 10
    assert ticker_list[0] is ticker[0]  # The start of the list is included
    assert ticker_list[9] is ticker[-1]  # The element 10 is not included

    # Test the pattern ^(\d{8})-$
    # This pattern extracts elements from the date to now
    timerange = TimeRange('date', None, ticker_list[10][0] / 1000 - 1, None)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_len == ticker_list_len - 10
    assert ticker_list[10] is ticker[0]  # The first element is element #10
    assert ticker_list[-1] is ticker[-1]  # The last element is the same

    # Test a wrong pattern
    # This pattern must return the list unchanged
    timerange = TimeRange(None, None, None, 5)
    ticker = trim_tickerlist(ticker_list, timerange)
    ticker_len = len(ticker)

    assert ticker_list_len == ticker_len

    # Test invalid timerange (start after stop)
    timerange = TimeRange('index', 'index', 10, 5)
    with pytest.raises(ValueError, match=r'The timerange .* is incorrect'):
        trim_tickerlist(ticker_list, timerange)

    assert ticker_list_len == ticker_len

    # passing empty list
    timerange = TimeRange(None, None, None, 5)
    ticker = trim_tickerlist([], timerange)
    assert 0 == len(ticker)
    assert not ticker
def test_load_cached_data_for_updating(mocker) -> None:
    """Check which cached slice and which re-download start timestamp are returned per timerange."""
    datadir = Path(__file__).parent.parent.joinpath('testdata')

    test_data = None
    test_filename = datadir.joinpath('UNITTEST_BTC-1m.json')
    with open(test_filename, "rt") as file:
        test_data = json.load(file)

    # change now time to test 'line' cases
    # now = last cached item + 1 hour
    now_ts = test_data[-1][0] / 1000 + 60 * 60
    mocker.patch('arrow.utcnow', return_value=arrow.get(now_ts))

    # timeframe starts earlier than the cached data
    # should fully update data
    timerange = TimeRange('date', None, test_data[0][0] / 1000 - 1, 0)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == []
    assert start_ts == test_data[0][0] - 1000

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 120
    data, start_ts = load_cached_data_for_updating(test_filename, '1m',
                                                   TimeRange(None, 'line', 0, -num_lines))
    assert data == []
    assert start_ts < test_data[0][0] - 1

    # timeframe starts in the center of the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[0][0] / 1000 + 1, 0)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # same with 'line' timeframe
    num_lines = (test_data[-1][0] - test_data[1][0]) / 1000 / 60 + 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # timeframe starts after the cached data
    # should return the cached data w/o the last item
    timerange = TimeRange('date', None, test_data[-1][0] / 1000 + 1, 0)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # same with 'line' timeframe
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # no timeframe is set
    # should return the cached data w/o the last item
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = load_cached_data_for_updating(test_filename, '1m', timerange)
    assert data == test_data[:-1]
    assert test_data[-2][0] < start_ts < test_data[-1][0]

    # no datafile exist
    # should return timestamp start time
    timerange = TimeRange('date', None, now_ts - 10000, 0)
    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
                                                   '1m', timerange)
    assert data == []
    assert start_ts == (now_ts - 10000) * 1000

    # same with 'line' timeframe
    num_lines = 30
    timerange = TimeRange(None, 'line', 0, -num_lines)
    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
                                                   '1m', timerange)
    assert data == []
    assert start_ts == (now_ts - num_lines * 60) * 1000

    # no datafile exist, no timeframe is set
    # should return an empty array and None
    data, start_ts = load_cached_data_for_updating(test_filename.with_name('unexist'),
                                                   '1m', None)
    assert data == []
    assert start_ts is None
dl_path = Path(args.export) if not dl_path.is_dir(): sys.exit(f'Directory {dl_path} does not exist.') pairs_file = Path( args.pairs_file) if args.pairs_file else dl_path.joinpath('pairs.json') if not pairs_file.exists(): sys.exit(f'No pairs file found with path {pairs_file}.') with pairs_file.open() as file: PAIRS = list(set(json.load(file))) PAIRS.sort() timerange = TimeRange() if args.days: time_since = arrow.utcnow().shift(days=-args.days).strftime("%Y%m%d") timerange = arguments.parse_timerange(f'{time_since}-') print(f'About to download pairs: {PAIRS} to {dl_path}') # Init exchange exchange = Exchange(config) pairs_not_available = [] for pair in PAIRS: if pair not in exchange._api.markets: pairs_not_available.append(pair) print(f"skipping pair {pair}") continue