def test_cryptocompare_historical_data_use_cached_price(data_dir, database):
    """Test that the cryptocompare cache is used and also properly deserialized"""
    # Seed the price-history cache for the SNGLS_BTC pair with two known entries
    contents = """{"start_time": 0, "end_time": 1439390800, "data": [{"time": 1438387200, "close": 10, "high": 10, "low": 10, "open": 10, "volumefrom": 10, "volumeto": 10}, {"time": 1438390800, "close": 20, "high": 20, "low": 20, "open": 20, "volumefrom": 20, "volumeto": 20}]}"""
    with open(os.path.join(data_dir, 'price_history_SNGLS_BTC.json'), 'w') as f:
        f.write(contents)

    cryptocompare = Cryptocompare(data_directory=data_dir, database=database)
    with patch.object(cryptocompare, 'query_endpoint_histohour') as histohour_mock:
        result = cryptocompare.get_historical_data(
            from_asset=A_SNGLS,
            to_asset=A_BTC,
            timestamp=1438390801,
            historical_data_start=0,
        )
        # histohour must never be hit — the cache alone should satisfy the query
        assert histohour_mock.call_count == 0

    # Both cached entries must come back, deserialized into FVal prices
    assert len(result) == 2
    for entry, expected in zip(result, (FVal(10), FVal(20))):
        assert isinstance(entry.low, FVal)
        assert entry.low == expected
        assert isinstance(entry.high, FVal)
        assert entry.high == expected
def test_cryptocompare_historical_data_price(
        data_dir,
        database,
        from_asset,
        to_asset,
        timestamp,
        price,
):
    """Test that the cryptocompare histohour data retrieval works and price is returned

    """
    cryptocompare = Cryptocompare(data_directory=data_dir, database=database)
    # Pull a wide window of hourly prices ending well past the target timestamp,
    # guaranteeing at least one full query beyond the ts we care about
    data = cryptocompare.get_historical_data(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp + 2020 * 3600,
        only_check_cache=False,
    )
    # Then resolve the specific timestamp from the data just cached
    assert price == cryptocompare._retrieve_price_from_data(
        data=data,
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
    )
def test_cryptocompare_histohour_data_going_backward(data_dir, database, freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the past workd properly
    and that the cached data are properly appended to the cached result. In production
    this scenario should not happen often. Only way to happen if cryptocompare somehow adds
    older data than what was previously queried.
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279936800
    # first timestamp cryptocompare has histohour BTC/USD is: 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    # Pre-populate the BTC_USD cache with a small window that starts AFTER
    # btc_start_ts, forcing the implementation to query backwards in time
    contents = """{"start_time": 1301536800, "end_time": 1301540400, "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)

    freezer.move_to(datetime.fromtimestamp(now_ts))
    cryptocompare = Cryptocompare(data_directory=data_dir, database=database)
    result = cryptocompare.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
        only_check_cache=False,
    )
    # The returned data must span from the earliest available point to now,
    # and the same must hold for what ended up in the in-memory cache
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=False)
    cache_key = PairCacheKey('BTC_USD')
    assert cache_key in cryptocompare.price_history
    cached = cryptocompare.price_history[cache_key]
    assert cached.start_time == btc_start_ts
    assert cached.end_time == now_ts
    check_cc_result(cached.data, forward=False)
def test_cryptocompare_histohour_data_going_forward(data_dir, database, freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the future works properly
    and appends the cached data with the newly returned data
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cryptocompare = Cryptocompare(data_directory=data_dir, database=database)
    result = cryptocompare.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
        only_check_cache=False,
    )

    cache_key = PairCacheKey('BTC_USD')
    # First pass: a single query's worth of data, all at the same price
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT + 1
    assert all(x.low == x.high == FVal('0.05454') for x in result)
    assert cache_key in cryptocompare.price_history
    cached = cryptocompare.price_history[cache_key]
    assert cached.start_time == btc_start_ts
    assert cached.end_time == now_ts
    assert all(x.low == x.high == FVal('0.05454') for x in cached.data)

    # now let's move a bit to the future and query again to see the cache is appended to
    now_ts = now_ts + 3600 * 2000 * 2 + 4700
    freezer.move_to(datetime.fromtimestamp(now_ts))
    result = cryptocompare.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 4 - 55,
        only_check_cache=False,
    )
    # Second pass: the cache should now cover the extended range
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=True)
    assert cache_key in cryptocompare.price_history
    cached = cryptocompare.price_history[cache_key]
    assert cached.start_time == btc_start_ts
    assert cached.end_time == now_ts
    check_cc_result(cached.data, forward=True)
def test_empty_histohour(data_dir, database, freezer):
    """Histohour can be empty and can have also floating point zeros like in CHI/EUR

    This test makes sure that an empty list is returned at the very first all zeros
    result that also has floating point and querying stops.
    If cryptocompare actually fixes their zero historical price problem this test can go away
    """
    now_ts = 1610365553
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cryptocompare = Cryptocompare(data_directory=data_dir, database=database)
    # CHI/EUR comes back as all-zero histohour data, which must yield an empty result
    assert cryptocompare.get_historical_data(
        from_asset=Asset('CHI'),
        to_asset=Asset('EUR'),
        timestamp=now_ts,
        only_check_cache=False,
    ) == []