Example #1
def test_price_queries(price_historian, data_dir, database):
    """Test some historical price queries. Make sure that we test some
    assets not in cryptocompare but in coigecko so the backup mechanism triggers and works"""

    # These should hit cryptocompare
    assert price_historian.query_historical_price(A_BTC, A_EUR, 1479200704) == FVal('663.66')
    assert price_historian.query_historical_price(A_XMR, A_BTC, 1579200704) == FVal('0.007526')
    # this should hit the cryptocompare cache we are creating here
    contents = """{"start_time": 0, "end_time": 1439390800,
    "data": [{"time": 1438387200, "close": 10, "high": 10, "low": 10, "open": 10,
    "volumefrom": 10, "volumeto": 10}, {"time": 1438390800, "close": 20, "high": 20,
    "low": 20, "open": 20, "volumefrom": 20, "volumeto": 20}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}DASH_USD.json',
              'w') as f:
        f.write(contents)
    price_historian._PriceHistorian__instance._cryptocompare = Cryptocompare(
        data_directory=data_dir,
        database=database,
    )
    price_historian.set_oracles_order(price_historian._oracles)
    assert price_historian.query_historical_price(A_DASH, A_USD, 1438387700) == FVal('10')
    # this should hit coingecko, since cornichon is not in cryptocompare
    cornichon = Asset('CORN-2')
    expected_price = FVal('0.07830444726516915')
    assert price_historian.query_historical_price(cornichon, A_USD, 1608854400) == expected_price
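
For context, the backup mechanism this test exercises amounts to trying each configured oracle in order and falling through to the next one on failure. A minimal sketch of that pattern; the names (query_with_fallback, OracleError) are illustrative and not rotki's actual API:

from typing import Callable, List, Optional

class OracleError(Exception):
    """Raised by an oracle that cannot serve the requested price."""

def query_with_fallback(
        oracles: List[Callable[[str, str, int], float]],
        from_asset: str,
        to_asset: str,
        timestamp: int,
) -> Optional[float]:
    for oracle in oracles:
        try:
            return oracle(from_asset, to_asset, timestamp)
        except OracleError:
            continue  # this oracle failed; try the next one in the configured order
    return None  # no oracle could serve the price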
Example #2
    def __new__(
            cls,
            data_dir: Optional[Path] = None,
            cryptocompare: Optional['Cryptocompare'] = None,
            coingecko: Optional['Coingecko'] = None,
    ) -> 'Inquirer':
        if Inquirer.__instance is not None:
            return Inquirer.__instance

        assert data_dir, 'arguments should be given at the first instantiation'
        assert cryptocompare, 'arguments should be given at the first instantiation'
        assert coingecko, 'arguments should be given at the first instantiation'

        Inquirer.__instance = object.__new__(cls)

        Inquirer.__instance._data_directory = data_dir
        Inquirer._cryptocompare = cryptocompare
        Inquirer._coingecko = coingecko
        Inquirer._cached_current_price = {}
        # Make price history directory if it does not exist
        price_history_dir = get_or_make_price_history_dir(data_dir)
        filename = price_history_dir / 'price_history_forex.json'
        try:
            with open(filename, 'r') as f:
                # we know price_history_forex contains a dict
                data = rlk_jsonloads_dict(f.read())
                Inquirer.__instance._cached_forex_data = data
        except (OSError, JSONDecodeError):
            Inquirer.__instance._cached_forex_data = {}

        return Inquirer.__instance
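
The __new__ override above implements a lazy singleton: the first call must supply all dependencies, and every later call returns the same instance while ignoring its arguments. A stripped-down, self-contained sketch of the same pattern (not rotki's actual class):

from typing import Optional

class Singleton:
    __instance: Optional['Singleton'] = None

    def __new__(cls, value: Optional[str] = None) -> 'Singleton':
        if Singleton.__instance is not None:
            return Singleton.__instance  # later calls ignore their arguments
        assert value, 'arguments should be given at the first instantiation'
        Singleton.__instance = object.__new__(cls)
        Singleton.__instance.value = value
        return Singleton.__instance

first = Singleton(value='data')
second = Singleton()  # no arguments needed after the first instantiation
assert first is second and second.value == 'data'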
Example #3
def save_historical_forex_data() -> None:
    instance = Inquirer()
    # Make price history directory if it does not exist
    price_history_dir = get_or_make_price_history_dir(instance._data_directory)
    filename = price_history_dir / 'price_history_forex.json'
    with open(filename, 'w') as outfile:
        outfile.write(rlk_jsondumps(instance._cached_forex_data))
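
Together with the loading code in Example #2, this forms a simple JSON round-trip for the forex cache. A minimal standalone sketch of the same idea, using the stdlib json module in place of the rlk_* helpers:

import json
from pathlib import Path

def save_cache(path: Path, cached: dict) -> None:
    # serialize the in-memory forex cache to a one-line JSON file
    path.write_text(json.dumps(cached))

def load_cache(path: Path) -> dict:
    # mirror of Example #2's loading: fall back to an empty cache on any error
    try:
        return json.loads(path.read_text())
    except (OSError, json.JSONDecodeError):
        return {}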
Example #4
def test_cryptocompare_histohour_data_going_backward(data_dir, database, freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query further in the past works properly
    and that the newly queried data are properly merged with the cached result. In production
    this scenario should not happen often. The only way it can happen is if cryptocompare
    somehow adds older data than what was previously queried.
    """
    # the first timestamp for which cryptocompare has histohour BTC/USD, when queried from this test, is:
    btc_start_ts = 1279936800
    # the first timestamp for which cryptocompare has histohour BTC/USD is: 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    # create a cache file for BTC_USD
    contents = """{"start_time": 1301536800, "end_time": 1301540400,
    "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298,
    "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298,
    "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
        only_check_cache=False,
    )
    cache_key = PairCacheKey('BTC_USD')
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=False)
    assert cache_key in cc.price_history
    assert cc.price_history[cache_key].start_time == btc_start_ts
    assert cc.price_history[cache_key].end_time == now_ts
    check_cc_result(cc.price_history[cache_key].data, forward=False)
Example #5
    def _save_cached_price(
        self,
        from_asset: Asset,
        to_asset: Asset,
        date: str,
        price: Price,
    ) -> None:
        price_history_dir = get_or_make_price_history_dir(self.data_directory)
        filename = (
            price_history_dir /
            f'{PRICE_HISTORY_FILE_PREFIX}{from_asset.identifier}_{to_asset.identifier}.json'
        )
        data: Dict[str, Price] = {}
        if filename.is_file():
            with open(filename, 'r') as f:
                try:
                    data = rlk_jsonloads_dict(f.read())
                except JSONDecodeError:
                    data = {}

        if not isinstance(data, dict):
            data = {}

        data[date] = price
        with open(filename, 'w') as outfile:
            outfile.write(rlk_jsondumps(data))
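
The cache file this produces is a flat JSON object mapping the date string the caller passed in to the cached price, so a BTC_USD cache could look like the following (values illustrative; the exact serialization of Price depends on the rlk JSON helpers):

{"2015-08-01": "283.049", "2020-12-25": "24522.11"}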
Example #6
def test_cryptocompare_historical_data_use_cached_price(data_dir, database):
    """Test that the cryptocompare cache is used and also properly deserialized"""
    # Create a cache file for SNGLS_BTC
    contents = """{"start_time": 0, "end_time": 1439390800,
    "data": [{"time": 1438387200, "close": 10, "high": 10, "low": 10, "open": 10,
    "volumefrom": 10, "volumeto": 10}, {"time": 1438390800, "close": 20, "high": 20,
    "low": 20, "open": 20, "volumefrom": 20, "volumeto": 20}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}SNGLS_BTC.json', 'w') as f:
        f.write(contents)

    cc = Cryptocompare(data_directory=data_dir, database=database)
    with patch.object(cc, 'query_endpoint_histohour') as histohour_mock:
        result = cc.get_historical_data(
            from_asset=A_SNGLS,
            to_asset=A_BTC,
            timestamp=1438390801,
            only_check_cache=False,
        )
        # make sure that histohour was not called, in essence that the cache was used
        assert histohour_mock.call_count == 0

    assert len(result) == 2
    assert isinstance(result[0].low, FVal)
    assert result[0].low == FVal(10)
    assert isinstance(result[0].high, FVal)
    assert result[0].high == FVal(10)
    assert isinstance(result[1].low, FVal)
    assert result[1].low == FVal(20)
    assert isinstance(result[1].high, FVal)
    assert result[1].high == FVal(20)
Example #7
    def __init__(self, data_directory: Path, database: Optional[DBHandler]) -> None:
        super().__init__(database=database, service_name=ExternalService.CRYPTOCOMPARE)
        self.data_directory = data_directory
        self.price_history: Dict[PairCacheKey, PriceHistoryData] = {}
        self.price_history_file: Dict[PairCacheKey, Path] = {}
        self.session = requests.session()
        self.session.headers.update({'User-Agent': 'rotkehlchen'})
        self.last_histohour_query_ts = 0
        self.last_rate_limit = 0

        price_history_dir = get_or_make_price_history_dir(data_directory)
        # Check the data folder and remember the filenames of any cached history
        prefix = os.path.join(str(price_history_dir),
                              PRICE_HISTORY_FILE_PREFIX)
        prefix = prefix.replace('\\', '\\\\')  # escape Windows path separators for the regex
        regex = re.compile(prefix + r'(.*)\.json')

        for file_ in price_history_dir.rglob(PRICE_HISTORY_FILE_PREFIX + '*.json'):
            file_str = str(file_).replace('\\\\', '\\')
            match = regex.match(file_str)
            assert match
            cache_key = PairCacheKey(match.group(1))
            self.price_history_file[cache_key] = file_
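
The regex only recovers the FROM_TO part of each cached filename. A simpler pathlib-based sketch of the same extraction, for illustration (the prefix value below is an assumption; use the real PRICE_HISTORY_FILE_PREFIX constant in practice):

from pathlib import Path
from typing import Dict

PRICE_HISTORY_FILE_PREFIX = 'price_history_'  # assumed value, for illustration only

def cache_keys_in(price_history_dir: Path) -> Dict[str, Path]:
    files = {}
    for file_ in price_history_dir.rglob(PRICE_HISTORY_FILE_PREFIX + '*.json'):
        # file_.stem drops the '.json' suffix; slicing drops the prefix,
        # leaving the same FROM_TO pair key the regex version captures
        files[file_.stem[len(PRICE_HISTORY_FILE_PREFIX):]] = file_
    return files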
Example #8
def upgrade_v22_to_v23(db: 'DBHandler') -> None:
    """Upgrades the DB from v22 to v23

    - Migrates the settings entries 'thousand_separator', 'decimal_separator'
    and 'currency_location' into the 'frontend_settings' entry.
    - Deletes Bitfinex trades and their used query range, so trades can be
    populated again with the right `fee_asset`.
    - Deletes all cryptocompare price cache files and moves the forex price cache
    to the price_history directory
    """
    settings = ('"thousand_separator"', '"decimal_separator"', '"currency_location"')
    cursor = db.conn.cursor()
    # Get the settings and put them in a dict
    setting_value_map = dict(
        cursor.execute(
            f'SELECT name, value FROM settings WHERE name IN ({",".join(settings)});',
        ).fetchall(),
    )
    # If the settings exist, migrate them into the 'frontend_settings' entry
    if setting_value_map:
        frontend_settings = cursor.execute(
            'SELECT value FROM settings WHERE name = "frontend_settings";',
        ).fetchone()

        if frontend_settings is not None:
            setting_value_map.update(json.loads(frontend_settings[0]))

        cursor.execute(
            'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
            ('frontend_settings', json.dumps(setting_value_map)),
        )
    # Delete the settings
    cursor.execute(f'DELETE FROM settings WHERE name IN ({",".join(settings)});')
    # Delete Bitfinex used_query_ranges
    cursor.execute('DELETE FROM used_query_ranges WHERE name = "bitfinex_trades";')
    # Delete Bitfinex trades
    cursor.execute('DELETE FROM trades WHERE location = "T";')
    # Delete deprecated historical data start setting
    cursor.execute('DELETE FROM settings WHERE name="historical_data_start";')
    db.conn.commit()

    # -- Now move forex history to the new directory and remove all old cache files
    data_directory = db.user_data_dir.parent
    price_history_dir = get_or_make_price_history_dir(data_directory)
    forex_history_file = data_directory / 'price_history_forex.json'
    if forex_history_file.is_file():
        shutil.move(
            forex_history_file,  # type: ignore
            # destination name must match what Inquirer reads back (see Examples #2 and #3)
            price_history_dir / 'price_history_forex.json',
        )

    prefix = os.path.join(str(data_directory), 'price_history_')
    prefix = prefix.replace('\\', '\\\\')
    files_list = glob.glob(prefix + '*.json')
    for file_ in files_list:
        file_ = file_.replace('\\\\', '\\')
        try:
            Path(file_).unlink()
        except OSError:
            pass
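
The settings migration above can be exercised in isolation against an in-memory SQLite database. A minimal sketch, assuming only the two-column settings schema the queries above imply:

import json
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE settings (name TEXT PRIMARY KEY, value TEXT)')
conn.executemany(
    'INSERT INTO settings VALUES (?, ?)',
    [('thousand_separator', ','), ('frontend_settings', '{"theme": "dark"}')],
)
# read the standalone setting into a dict, as the upgrade does
setting_value_map = dict(conn.execute(
    "SELECT name, value FROM settings WHERE name = 'thousand_separator'",
).fetchall())
# merge any pre-existing frontend_settings on top, then write the result back
frontend = conn.execute(
    "SELECT value FROM settings WHERE name = 'frontend_settings'",
).fetchone()
if frontend is not None:
    setting_value_map.update(json.loads(frontend[0]))
conn.execute(
    'INSERT OR REPLACE INTO settings(name, value) VALUES(?, ?)',
    ('frontend_settings', json.dumps(setting_value_map)),
)
conn.execute("DELETE FROM settings WHERE name = 'thousand_separator'")
assert json.loads(conn.execute(
    "SELECT value FROM settings WHERE name = 'frontend_settings'",
).fetchone()[0]) == {'thousand_separator': ',', 'theme': 'dark'}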
Example #9
def test_get_cached_data_metadata(data_dir, database):
    """Test that the get_cached_data_metadata function works correctly
    and returns just the metadata by reading part of the file
    """
    contents = """{"start_time": 1301536800, "end_time": 1301540400,
    "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298,
    "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298,
    "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    cc = Cryptocompare(data_directory=data_dir, database=database)
    result = cc.get_cached_data_metadata(
        from_asset=A_BTC,
        to_asset=A_USD,
    )
    assert result is not None
    assert result[0] == 1301536800
    assert result[1] == 1301540400
Example #10
    def _get_cached_price(
        self,
        from_asset: Asset,
        to_asset: Asset,
        date: str,
    ) -> Optional[Price]:
        price_history_dir = get_or_make_price_history_dir(self.data_directory)
        filename = (
            price_history_dir /
            f'{PRICE_HISTORY_FILE_PREFIX}{from_asset.identifier}_{to_asset.identifier}.json'
        )
        if not filename.is_file():
            return None

        with open(filename, 'r') as f:
            try:
                data: Dict[str, Price] = rlk_jsonloads_dict(f.read())
            except JSONDecodeError:
                return None

        if not isinstance(data, dict):
            return None

        return data.get(date, None)
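
This is the read side of _save_cached_price from Example #5; both methods build the same filename, so a save followed by a get for the same pair and date string should return the stored price. A hedged usage sketch (assuming a Cryptocompare-like instance cc and that prices round-trip through the rlk JSON helpers):

cc._save_cached_price(A_BTC, A_USD, '2020-12-25', Price(FVal('23000')))
assert cc._get_cached_price(A_BTC, A_USD, '2020-12-25') == FVal('23000')
assert cc._get_cached_price(A_BTC, A_USD, '1999-01-01') is None  # date never cached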
Example #11
def test_get_cached_data_metadata(data_dir, database):
    """Test that the get_cached_data_metadata function works correctly
    and returns just the metadata by reading part of the file

    The JSON cache data in production are saved as one-line files,
    so here we also keep the contents on one line on purpose. The
    regex we previously used failed on one-line JSON files
    """
    contents = """{"start_time": 1301536800, "end_time": 1301540400, "data": [{"time": 1301536800, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}, {"time": 1301540400, "close": 0.298, "high": 0.298, "low": 0.298, "open": 0.298, "volumefrom": 0.298, "volumeto": 0.298}]}"""  # noqa: E501
    price_history_dir = get_or_make_price_history_dir(data_dir)
    with open(price_history_dir / f'{PRICE_HISTORY_FILE_PREFIX}BTC_USD.json', 'w') as f:
        f.write(contents)
    cc = Cryptocompare(data_directory=data_dir, database=database)
    # make sure that _read_cachefile_metadata runs and they are read from file and not from memory
    cc.price_history = {}
    result = cc.get_cached_data_metadata(
        from_asset=A_BTC,
        to_asset=A_USD,
    )
    assert result is not None
    assert result[0] == 1301536800
    assert result[1] == 1301540400
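
The point about one-line files matters because reading only the head of the file must still capture both timestamps. A minimal sketch of such a prefix read, using a non-greedy regex that works whether the file is one line or many; the body is an assumption, not rotki's actual _read_cachefile_metadata implementation:

import re
from pathlib import Path
from typing import Optional, Tuple

META_RE = re.compile(r'"start_time":\s*(\d+).*?"end_time":\s*(\d+)', re.DOTALL)

def read_cachefile_metadata(path: Path) -> Optional[Tuple[int, int]]:
    with open(path, 'r') as f:
        head = f.read(256)  # the metadata sits at the start; skip the bulk of the data
    match = META_RE.search(head)
    if match is None:
        return None
    return int(match.group(1)), int(match.group(2))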
Example #12
    def get_historical_data(
        self,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
        only_check_cache: bool,
    ) -> Optional[List[PriceHistoryEntry]]:
        """
        Get historical hour price data from cryptocompare

        Returns a sorted list of price entries.

        If only_check_cache is True and the data is not cached locally, this will return None

        - May raise RemoteError if there is a problem reaching the cryptocompare server
        or with reading the response returned by the server
        - May raise UnsupportedAsset if from/to asset is not supported by cryptocompare
        """
        log.debug(
            'Retrieving historical price data from cryptocompare',
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        cache_key = PairCacheKey(from_asset.identifier + '_' + to_asset.identifier)
        cached_data = self._got_cached_data_at_timestamp(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
        )
        if cached_data is not None:
            return cached_data.data

        if only_check_cache:
            return None

        now_ts = ts_now()
        # save the time at the start of the query, in case the query does not complete due to rate limit
        self.last_histohour_query_ts = now_ts
        if cache_key in self.price_history:
            old_data = self.price_history[cache_key].data
            if timestamp > self.price_history[cache_key].end_time:
                # We have a cache but the requested timestamp does not hit it
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=now_ts,
                    to_timestamp=self.price_history[cache_key].end_time,
                )
                if old_data[-1].time == new_data[0]['time']:
                    old_data = old_data[:-1]
                new_history = deque([x._asdict() for x in old_data]) + new_data
            else:
                # only other possibility, timestamp < cached start_time
                # Get all available data, even before to_timestamp
                new_data = self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=self.price_history[cache_key].start_time,
                    to_timestamp=Timestamp(0),
                )
                if new_data[-1]['time'] == old_data[0].time:
                    new_data.pop()
                new_history = new_data + deque([x._asdict() for x in old_data])

            calculated_history = list(new_history)

        else:
            calculated_history = list(
                self._get_histohour_data_for_range(
                    from_asset=from_asset,
                    to_asset=to_asset,
                    from_timestamp=now_ts,
                    to_timestamp=Timestamp(0),
                ))

        if len(calculated_history) == 0:
            return []  # empty list means we found nothing

        # Let's always check for data sanity for the hourly prices.
        _check_hourly_data_sanity(calculated_history, from_asset, to_asset)
        # and now since we actually queried the data let's also cache them
        filename = (get_or_make_price_history_dir(self.data_directory) /
                    (PRICE_HISTORY_FILE_PREFIX + cache_key + '.json'))
        log.info(
            'Updating price history cache',
            filename=filename,
            from_asset=from_asset,
            to_asset=to_asset,
        )
        write_history_data_in_file(
            data=calculated_history,
            filepath=filename,
            start_ts=calculated_history[0]['time'],
            end_ts=now_ts,
        )

        # Finally save the objects in memory and return them
        data_including_time = {
            'data': calculated_history,
            'start_time': calculated_history[0]['time'],
            'end_time': now_ts,
        }
        self.price_history_file[cache_key] = filename
        self.price_history[cache_key] = _dict_history_to_data(data_including_time)
        self.last_histohour_query_ts = ts_now()  # also save when the last query finished
        return self.price_history[cache_key].data
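
Both cache-extension branches above reduce to the same merge rule: drop the duplicated boundary entry, then concatenate the two time-sorted series. A compact sketch of that rule on plain dicts (assuming both inputs are sorted ascending by 'time' and overlap in at most the single boundary entry):

from typing import Dict, List

def merge_histohour(older: List[Dict], newer: List[Dict]) -> List[Dict]:
    # drop the duplicated boundary entry if the two ranges touch
    if older and newer and older[-1]['time'] == newer[0]['time']:
        older = older[:-1]
    return older + newer

a = [{'time': 1, 'close': 10}, {'time': 2, 'close': 20}]
b = [{'time': 2, 'close': 20}, {'time': 3, 'close': 30}]
assert [e['time'] for e in merge_histohour(a, b)] == [1, 2, 3]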