Example #1
def add_ethereum_token_to_db(token_data: EthereumToken) -> EthereumToken:
    """Adds an ethereum token to the DB and returns it

    May raise:
    - InputError if token already exists in the DB
    """
    globaldb = GlobalDBHandler()
    globaldb.add_asset(
        asset_id=token_data.identifier,
        asset_type=AssetType.ETHEREUM_TOKEN,
        data=token_data,
    )
    # This can, but should not, raise UnknownAsset or DeserializationError
    return EthereumToken(token_data.ethereum_address,
                         form_with_incomplete_data=True)
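A minimal caller sketch for the helper above. The import path for InputError is an assumption; only the exception name appears in the docstring.

# Hypothetical usage sketch -- InputError's import path is assumed, not
# taken from the snippet above.
from typing import Optional

from rotkehlchen.errors import InputError

def add_token_if_missing(token_data: EthereumToken) -> Optional[EthereumToken]:
    try:
        return add_ethereum_token_to_db(token_data)
    except InputError:
        return None  # the token is already tracked in the DB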
Example #2
def get_globaldb_cache_entries(from_asset: Asset, to_asset: Asset) -> List[HistoricalPrice]:
    """TODO: This should probaly be moved in the globaldb/handler.py if we use it elsewhere
    and made more generic (accept different sources)"""
    connection = GlobalDBHandler()._conn
    cursor = connection.cursor()
    query = cursor.execute(
        'SELECT from_asset, to_asset, source_type, timestamp, price FROM '
        'price_history WHERE from_asset=? AND to_asset=? AND source_type=? ORDER BY timestamp ASC',
        (
            from_asset.identifier,
            to_asset.identifier,
            HistoricalPriceOracle.CRYPTOCOMPARE.serialize_for_db(),  # pylint: disable=no-member
        ),
    )
    return [HistoricalPrice.deserialize_from_db(x) for x in query]
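A short usage sketch; it reuses the A_BTC and A_USD constants that the tests below already import.

# Sketch: rows come back ordered by timestamp ASC, so the first and last
# entries bound the cached range for the pair.
entries = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
if entries:
    print(f'cached from {entries[0].timestamp} to {entries[-1].timestamp}')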
Example #3
def test_cryptocompare_histohour_data_going_backward(data_dir, database,
                                                     freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the past works properly
    and that the newly returned data are properly merged with the cached result. In
    production this scenario should not happen often. The only way for it to happen is
    if cryptocompare somehow adds older data than what was previously queried.
    """
    globaldb = GlobalDBHandler()
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279936800
    # first timestamp cryptocompare has histohour BTC/USD is: 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    # create a cache file for BTC_USD
    cache_data = [
        HistoricalPrice(
            from_asset=A_BTC,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1301536800),
            price=Price(FVal('0.298')),
        ),
        HistoricalPrice(
            from_asset=A_BTC,
            to_asset=A_USD,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
            timestamp=Timestamp(1301540400),
            price=Price(FVal('0.298')),
        )
    ]
    globaldb.add_historical_prices(cache_data)

    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
    )
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=False)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE)  # noqa: E501
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1301540400  # that's the closest ts to now_ts cc returns
Example #4
    def __init__(self, msg_aggregator: MessagesAggregator) -> None:
        self.msg_aggregator = msg_aggregator
        self.v25exchangename_to_location = {
            'kraken': 'B',
            'poloniex': 'C',
            'bittrex': 'D',
            'bitmex': 'F',
            'binance': 'E',
            'coinbase': 'G',
            'coinbasepro': 'K',
            'gemini': 'L',
            'bitstamp': 'R',
            'binance_us': 'S',
            'bitfinex': 'T',
            'bitcoinde': 'U',
            'iconomi': 'V',
            'kucoin': 'W',
            'ftx': 'Z',
            'rotkehlchen': 'A',
        }
        globaldb = GlobalDBHandler()
        globaldb_conn = globaldb._conn
        globaldb_cursor = globaldb_conn.cursor()
        query = globaldb_cursor.execute('SELECT identifier from assets;')
        self.all_asset_ids = {x[0] for x in query}
Example #5
def test_cryptocompare_historical_data_price(
    data_dir,
    database,
    from_asset,
    to_asset,
    timestamp,
    price,
):
    """Test that the cryptocompare histohour data retrieval works and price is returned

    """
    cc = Cryptocompare(data_directory=data_dir, database=database)
    # Get lots of historical prices from at least 1 query after the ts we need
    cc.query_and_store_historical_data(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp + 2020 * 3600,
    )
    # Query the ts we need directly from the cached data
    price_cache_entry = GlobalDBHandler().get_historical_price(
        from_asset=from_asset,
        to_asset=to_asset,
        timestamp=timestamp,
        max_seconds_distance=3600,
        source=HistoricalPriceOracle.CRYPTOCOMPARE,
    )
    assert price_cache_entry.price == price
Example #6
    def can_query_history(
            self,
            from_asset: Asset,
            to_asset: Asset,
            timestamp: Timestamp,
            seconds: Optional[int] = CRYPTOCOMPARE_RATE_LIMIT_WAIT_TIME,
    ) -> bool:
        """Checks if it's okay to query cryptocompare historical price. This is determined by:

        - Existence of a cached price
        - Last rate limit
        """
        data_range = GlobalDBHandler().get_historical_price_range(
            from_asset=from_asset,
            to_asset=to_asset,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
        )
        got_cached_data = data_range is not None and data_range[0] <= timestamp <= data_range[1]
        rate_limited = self.rate_limited_in_last(seconds)
        can_query = got_cached_data or not rate_limited
        log.debug(
            f'{"Will" if can_query else "Will not"} query '
            f'Cryptocompare history for {from_asset.identifier} -> '
            f'{to_asset.identifier} @ {timestamp}. Cached data: {got_cached_data}'
            f' rate_limited in last {seconds} seconds: {rate_limited}',
        )
        return can_query
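The decision reduces to got_cached_data or not rate_limited. A hedged caller sketch follows; the cc instance name matches the tests above, and query_historical_price is an assumed oracle method.

# Sketch only: skip this oracle when it can neither serve from cache nor
# make a fresh request because of a recent rate limit.
if cc.can_query_history(from_asset=A_BTC, to_asset=A_USD, timestamp=query_ts):
    price = cc.query_historical_price(A_BTC, A_USD, query_ts)  # assumed oracle method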
Example #7
    def query_uncached_icons_batch(self, batch_size: int) -> bool:
        """Queries a batch of uncached icons for assets

        Returns True if there are more icons left to cache after this batch.
        """
        coingecko_integrated_asset_ids = []
        # type ignore is due to: https://github.com/python/mypy/issues/7781
        assets_list = GlobalDBHandler().get_all_asset_data(
            mapping=False)  # type:ignore
        for entry in assets_list:
            try:
                if entry.asset_type != AssetType.FIAT and entry.coingecko is not None and entry.coingecko != '':  # noqa: E501
                    coingecko_integrated_asset_ids.append(entry.identifier)
            except KeyError:
                log.warning(
                    f'Ignoring asset {entry.identifier} during query icons due to KeyError',
                )
                continue

        cached_asset_ids = [
            str(x.name)[:-10] for x in self.icons_dir.glob('*_thumb.png')
            if x.is_file()
        ]
        uncached_asset_ids = (set(coingecko_integrated_asset_ids) -
                              set(cached_asset_ids) - self.failed_asset_ids)
        log.info(
            f'Periodic task to query coingecko for {batch_size} uncached asset icons. '
            f'Uncached assets: {len(uncached_asset_ids)}. Cached assets: {len(cached_asset_ids)}',
        )
        for asset_name in itertools.islice(uncached_asset_ids, batch_size):
            self._query_coingecko_for_icon(Asset(asset_name))

        return len(uncached_asset_ids) > batch_size
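Since the method returns True while uncached icons remain, a periodic driver can simply loop on it. A sketch with assumed names:

import time

# Hypothetical driver loop: icon_manager is assumed to be the object
# exposing query_uncached_icons_batch; the sleep interval is illustrative.
while icon_manager.query_uncached_icons_batch(batch_size=5):
    time.sleep(10)  # be polite to coingecko between batches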
Example #8
def test_fallback_to_cached_values_within_a_month(inquirer):  # pylint: disable=unused-argument
    def mock_api_remote_fail(url, timeout):  # pylint: disable=unused-argument
        return MockResponse(500, '{"msg": "shit hit the fan"')

    # Get a date 15 days ago and insert a cached entry for EUR JPY then
    # Get a date 31 days ago and insert a cache entry for EUR CNY then
    now = ts_now()
    eurjpy_val = Price(FVal('124.123'))
    cache_data = [
        HistoricalPrice(
            from_asset=A_EUR,
            to_asset=A_JPY,
            source=HistoricalPriceOracle.XRATESCOM,
            timestamp=Timestamp(now - 86400 * 15),
            price=eurjpy_val,
        ),
        HistoricalPrice(
            from_asset=A_EUR,
            to_asset=A_CNY,
            source=HistoricalPriceOracle.XRATESCOM,
            timestamp=Timestamp(now - 86400 * 31),
            price=Price(FVal('7.719')),
        )
    ]
    GlobalDBHandler().add_historical_prices(cache_data)

    with patch('requests.get', side_effect=mock_api_remote_fail):
        # We fail to find a response but then go back 15 days and find the cached response
        result = inquirer._query_fiat_pair(A_EUR, A_JPY)
        assert result == eurjpy_val
        # The cached response for EUR CNY is too old so we will fail here
        with pytest.raises(RemoteError):
            result = inquirer._query_fiat_pair(A_EUR, A_CNY)
Example #9
def main() -> None:
    args = parse_args()
    target_directory = os.getcwd() if args.target_directory is None else args.target_directory
    target_directory = Path(target_directory)
    if not target_directory.is_dir():
        print(f'Given directory {target_directory} not a valid directory')
        sys.exit(1)
    # The way global db works it needs to be under a directory called 'global_data'
    target_global_dir = target_directory / 'global_data'
    target_global_dir.mkdir(parents=True, exist_ok=True)
    get_remote_global_db(
        directory=target_global_dir,
        version=args.start_db,
        branch=args.assets_branch,
    )
    print('Applying updates...')
    GlobalDBHandler(data_dir=target_directory)
    assets_updater = AssetsUpdater(msg_aggregator=MessagesAggregator())
    conflicts = assets_updater.perform_update(
        up_to_version=args.target_version,
        conflicts=None,
    )
    if conflicts is not None:
        print('There were conflicts during the update. Bailing.')
        sys.exit(1)

    # Due to the way globaldb initializes we have two of them. Clean up the extra one
    print('Cleaning up...')
    (target_directory / 'global_data' / 'global.db').rename(target_directory / 'global.db')
    shutil.rmtree(target_directory / 'global_data')
    print('Done!')
Example #10
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Queries/detects token balances for a list of addresses

        If an address's tokens were recently autodetected they are not detected again;
        the balances are simply queried, unless force_detection is True.

        Returns the token balances of each address and the usd prices of the tokens
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        all_tokens = GlobalDBHandler().get_ethereum_tokens(exceptions=[
            # Ignore the veCRV balance in token query. It's already detected by
            # defi SDK as part of locked CRV in Vote Escrowed CRV. Which is the right way
            # to approach it as there is no way to assign a price to 1 veCRV. It
            # can be 1 CRV locked for 4 years or 4 CRV locked for 1 year etc.
            string_to_ethereum_address(
                '0x5f3b5DfEb7B28CDbD7FAba78963EE202a494e2A2'),
        ])
        # With etherscan with chunks > 120, we get request uri too large
        # so the limitation is not in the gas, but in the request uri length
        etherscan_chunks = list(
            get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        other_chunks = list(
            get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        now = ts_now()
        token_usd_price: Dict[EthereumToken, Price] = {}
        result = {}

        for address in addresses:
            saved_list = self.db.get_tokens_for_address_if_time(
                address=address, current_time=now)
            if force_detection or saved_list is None:
                balances = self.detect_tokens_for_address(
                    address=address,
                    token_usd_price=token_usd_price,
                    etherscan_chunks=etherscan_chunks,
                    other_chunks=other_chunks,
                )
            else:
                if len(saved_list) == 0:
                    continue  # Do not query if we know the address has no tokens

                balances = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=address,
                    tokens=[x.to_custom_ethereum_token() for x in saved_list],
                    balances=balances,
                    token_usd_price=token_usd_price,
                    call_order=None,  # use defaults
                )

            result[address] = balances

        return result, token_usd_price
Example #11
    def find_yearn_price(
        self,
        token: EthereumToken,
    ) -> Optional[Price]:
        """
        Query price for a yearn vault v2 token using the pricePerShare method
        and the price of the underlying token.
        """
        assert self._ethereum is not None, 'Inquirer ethereum manager should have been initialized'  # noqa: E501

        maybe_underlying_token = GlobalDBHandler().fetch_underlying_tokens(
            token.ethereum_address)
        if maybe_underlying_token is None or len(maybe_underlying_token) != 1:
            log.error(f'Yearn vault token {token} without an underlying asset')
            return None

        underlying_token = EthereumToken(maybe_underlying_token[0].address)
        underlying_token_price = self.find_usd_price(underlying_token)
        # Get the price per share from the yearn contract
        contract = EthereumContract(
            address=token.ethereum_address,
            abi=YEARN_VAULT_V2_ABI,
            deployed_block=0,
        )
        try:
            price_per_share = contract.call(self._ethereum, 'pricePerShare')
            return Price(price_per_share * underlying_token_price /
                         10**token.decimals)
        except (RemoteError, BlockchainQueryError) as e:
            log.error(
                f'Failed to query pricePerShare method in Yearn v2 Vault. {str(e)}'
            )

        return None
Example #12
    def query_historical_price(
        cls,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
    ) -> Price:
        price_entry = GlobalDBHandler().get_historical_price(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
            max_seconds_distance=3600,
            source=HistoricalPriceOracle.MANUAL,
        )
        if price_entry is not None:
            log.debug('Got historical manual price',
                      from_asset=from_asset,
                      to_asset=to_asset,
                      timestamp=timestamp)  # noqa: E501
            return price_entry.price

        raise NoPriceForGivenTimestamp(
            from_asset=from_asset,
            to_asset=to_asset,
            time=timestamp,
        )
Example #13
    def query_historical_price(
        cls,
        from_asset: Asset,
        to_asset: Asset,
        timestamp: Timestamp,
    ) -> Price:
        price_entry = GlobalDBHandler().get_historical_price(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=timestamp,
            max_seconds_distance=3600,
            source=HistoricalPriceOracle.MANUAL,
        )
        if price_entry and price_entry.price != Price(ZERO):
            log.debug('Got historical manual price',
                      from_asset=from_asset,
                      to_asset=to_asset,
                      timestamp=timestamp)  # noqa: E501
            return price_entry.price

        raise NoPriceForGivenTimestamp(
            from_asset=from_asset,
            to_asset=to_asset,
            date=timestamp_to_date(
                timestamp,
                formatstr='%d/%m/%Y, %H:%M:%S',
                treat_as_local=True,
            ),
        )
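This variant differs from the previous example in two ways: a cached price of zero is treated as missing, and NoPriceForGivenTimestamp now takes a formatted date rather than a raw timestamp. A caller sketch, with the enclosing oracle class name assumed:

# Sketch: a missing or zero manual price surfaces as the exception, which
# callers typically catch to fall through to the next oracle in the chain.
try:
    price = ManualPriceOracle.query_historical_price(A_EUR, A_JPY, timestamp)
except NoPriceForGivenTimestamp:
    price = None  # let the next oracle try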
Example #14
def test_asset_identifiers_are_unique_all_lowercased():
    """Test that adding an identifier that exists but with different case, would fail"""
    with pytest.raises(InputError):
        GlobalDBHandler().add_asset(
            'Eth',
            AssetType.BINANCE_TOKEN,
            {'name': 'a', 'symbol': 'b'},
        )
Example #15
def test_atoken_to_asset():
    cursor = GlobalDBHandler()._conn.cursor()
    result = cursor.execute(
        'SELECT A.address from ethereum_tokens as A LEFT OUTER JOIN assets as B '
        'WHERE A.address=B.details_reference AND A.protocol IN (?, ?)',
        ('aave', 'aave-v2'),
    )
    for entry in result:
        atoken = EthereumToken(entry[0])
        reserve_asset = atoken_to_asset(atoken)
        if atoken in ATOKENV1_TO_ASSET:
            assert reserve_asset == ATOKENV1_TO_ASSET[atoken]
        else:
            assert reserve_asset == ATOKENV2_ADDRESS_TO_RESERVE_ASSET[atoken.ethereum_address]

    for atokenv1, reserve_asset in ATOKENV1_TO_ASSET.items():
        assert atoken_to_asset(atokenv1) == reserve_asset
Example #16
def test_cryptocompare_asset_support(cryptocompare):
    """Try to detect if a token that we have as not supported by cryptocompare got added"""
    cc_assets = cryptocompare.all_coins()
    exceptions = (
        'BKC',     # Bankcoin Cash but Balkan Coin in CC
        'BNC',     # Bionic but Benja Coin in CC
        'BTG-2',   # Bitgem but Bitcoin Gold in CC
        'BTR',     # Bitether but Bither in CC
        'CBC-2',   # Cashbery coin but Casino Betting Coin in CC
        'CCN',     # CustomContractnetwork but CannaCoin in CC
        'CMCT-2',  # Cyber Movie Chain but Crowd Machine in CC
        'CORN-2',  # Cornichon but Corn in CC
        'CTX',     # Centauri coin but CarTaxi in CC
        'DIT',     # Direct insurance token but DitCoin in CC
        'DRM',     # Dreamcoin but Dreamchain in CC
        'DTX-2',   # Digital Ticks but Data Exchange in CC
        'GNC',     # Galaxy network but Greencoin in CC
        'KNT',     # Kora network but Knekted in CC
        'LKY',     # Linkey but LuckyCoin in CC
        'NTK-2',   # Netkoin but Neurotoken in CC
        'PAN',     # Panvala but Pantos in CC
        'PTT',     # Proton token but Pink Taxi Token in CC
        'RMC',     # Remicoin but Russian Miner Coin in CC
        'SOUL-2',  # Cryptosoul but Phantasma in CC
        'TIC',     # Thingschain but True Investment Coin in CC
        'TOK',     # TOKOK but Tokugawa Coin in CC
        'VD',      # Bitcoin card but Vindax Coin in CC
        'DT',      # Dragon Token but Dark Token in CC
        'MUST',    # Must (Cometh) but Must protocol in CC
        'SDT-2',   # Stake DAO token but TerraSDT in CC
        'BAC',     # Basis Cash but BACoin in CC
        'IHF',     # waiting until cryptocompare fixes historical price for this. https://github.com/rotki/rotki/pull/2176  # noqa: E501
        'FLOW',    # FLOW from dapper labs but "Flow Protocol" in CC
        'NCT-2',   # Name change token but Polyswarm in CC
        'NDX',     # newdex token but Index token in CC
        'ARCH-2',  # Archer DAO Governance token but Archcoin in CC
        'AC-2',    # Acoconut token but Asiacoin in CC
        'TON',     # Tontoken but Tokamak network in CC
        'FNK',     # Finiko token but FunKeyPai network in CC
        'LOTTO',   # Lotto token but LottoCoin in CC
        'XFI',     # Dfinance token but XFinance in CC
        'GOLD',    # Gold token but Golden Goose in CC
        'ACM',     # AC Milan Fan Token but Actinium in CC
        'TFC',     # TheFutbolCoin but The Freedom Coin in CC
        'MASK',    # Mask Network but NFTX Hashmask Index in CC
    )
    for asset_data in GlobalDBHandler().get_all_asset_data(mapping=False):
        potential_support = (
            asset_data.cryptocompare == '' and
            asset_data.symbol in cc_assets and
            asset_data.identifier not in exceptions
        )
        if potential_support:
            msg = (
                f'We have {asset_data.identifier} as not supported by cryptocompare but '
                f'the symbol appears in its supported assets'
            )
            test_warnings.warn(UserWarning(msg))
Example #17
def atoken_to_asset(atoken: EthereumToken) -> Optional[Asset]:
    if atoken == A_AETH_V1:
        return A_ETH
    if atoken == A_AREP_V1:
        return A_REP

    asset_symbol = atoken.symbol[1:]
    cursor = GlobalDBHandler().conn.cursor()
    result = cursor.execute(
        'SELECT A.address from ethereum_tokens as A LEFT OUTER JOIN assets as B '
        'WHERE A.address=B.details_reference AND B.symbol=? COLLATE NOCASE',
        (asset_symbol,),
    ).fetchall()
    if len(result) != 1:
        log.error(f'Could not find asset from {atoken} since multiple or no results were returned')
        return None

    return Asset(ethaddress_to_identifier(result[0][0]))
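A usage sketch for the mapping above; the address is illustrative (meant to be an aToken such as aDAI), and a None return signals a missing or ambiguous symbol lookup.

# Sketch: strip the leading 'a' from the aToken symbol and resolve the
# reserve asset; ambiguous symbols come back as None.
atoken = EthereumToken('0x028171bCA77440897B824Ca71D1c56caC55b68A3')  # aDAI (illustrative)
reserve = atoken_to_asset(atoken)
if reserve is not None:
    print(f'{atoken.symbol} wraps {reserve.identifier}')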
Example #18
def test_all_assets_json_tokens_address_is_checksummed():
    """Test that all ethereum saved token asset addresses are checksummed"""
    for asset_data in GlobalDBHandler().get_all_asset_data(mapping=False):
        if not asset_data.asset_type == AssetType.ETHEREUM_TOKEN:
            continue

        msg = (f'Ethereum token\'s {asset_data.name} ethereum address '
               f'is not checksummed {asset_data.ethereum_address}')
        assert is_checksum_address(asset_data.ethereum_address), msg
Example #19
def test_cryptocompare_histohour_data_going_forward(data_dir, database,
                                                    freezer):
    """Test that the cryptocompare histohour data retrieval works properly

    This test checks that doing an additional query in the future works properly
    and appends the newly returned data to the cached data
    """
    # first timestamp cryptocompare has histohour BTC/USD when queried from this test is
    btc_start_ts = 1279940400
    now_ts = btc_start_ts + 3600 * 2000 + 122
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc = Cryptocompare(data_directory=data_dir, database=database)
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 2 - 55,
    )

    globaldb = GlobalDBHandler()
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT + 1
    assert all(x.price == Price(FVal(0.05454)) for x in result)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE)  # noqa: E501
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1287140400  # that's the closest ts to now_ts cc returns

    # now let's move a bit to the future and query again to see the cache is appended to
    now_ts = now_ts + 3600 * 2000 * 2 + 4700
    freezer.move_to(datetime.fromtimestamp(now_ts))
    cc.query_and_store_historical_data(
        from_asset=A_BTC,
        to_asset=A_USD,
        timestamp=now_ts - 3600 * 4 - 55,
    )
    result = get_globaldb_cache_entries(from_asset=A_BTC, to_asset=A_USD)
    assert len(result) == CRYPTOCOMPARE_HOURQUERYLIMIT * 3 + 2
    check_cc_result(result, forward=True)
    data_range = globaldb.get_historical_price_range(
        A_BTC, A_USD, HistoricalPriceOracle.CRYPTOCOMPARE)  # noqa: E501
    assert data_range[0] == btc_start_ts
    assert data_range[1] == 1301544000  # that's the closest ts to now_ts cc returns
Example #20
    def query_historical_fiat_exchange_rates(
        from_fiat_currency: Asset,
        to_fiat_currency: Asset,
        timestamp: Timestamp,
    ) -> Optional[Price]:
        assert from_fiat_currency.is_fiat(), 'fiat currency should have been provided'
        assert to_fiat_currency.is_fiat(), 'fiat currency should have been provided'
        # Check cache
        price_cache_entry = GlobalDBHandler().get_historical_price(
            from_asset=from_fiat_currency,
            to_asset=to_fiat_currency,
            timestamp=timestamp,
            max_seconds_distance=DAY_IN_SECONDS,
        )
        if price_cache_entry:
            return price_cache_entry.price

        try:
            prices_map = get_historical_xratescom_exchange_rates(
                from_asset=from_fiat_currency,
                time=timestamp,
            )
        except RemoteError:
            return None

        # Since xratescom has daily rates, let's save at the timestamp of the UTC day start
        for asset, asset_price in prices_map.items():
            GlobalDBHandler().add_historical_prices(entries=[
                HistoricalPrice(
                    from_asset=from_fiat_currency,
                    to_asset=asset,
                    source=HistoricalPriceOracle.XRATESCOM,
                    timestamp=timestamp_to_daystart_timestamp(timestamp),
                    price=asset_price,
                )
            ])
            if asset == to_fiat_currency:
                rate = asset_price

        log.debug('Historical fiat exchange rate query successful', rate=rate)
        return rate
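The comment about daily rates matters for later cache lookups: writes are snapped to the UTC day start, so a read with max_seconds_distance=DAY_IN_SECONDS can still hit them. A numeric sketch, assuming the helper is UTC-based:

# 2021-05-01 13:37:00 UTC snaps back to 2021-05-01 00:00:00 UTC.
assert timestamp_to_daystart_timestamp(Timestamp(1619876220)) == Timestamp(1619827200)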
Example #21
    def addresses_to_decoders(self) -> Dict[ChecksumEthAddress, Tuple[Any, ...]]:
        compound_tokens = GlobalDBHandler().get_ethereum_tokens(protocol='compound')
        mapping: Dict[ChecksumEthAddress, Tuple[Any, ...]] = {}
        for token in compound_tokens:
            if token == A_COMP:
                continue

            mapping[token.ethereum_address] = (self.decode_compound_token_movement, token)
        mapping[COMPTROLLER_PROXY.address] = (self.decode_comp_claim,)
        return mapping
Example #22
def test_aave_reserve_mapping():
    atokensv1 = GlobalDBHandler().get_ethereum_tokens(protocol='aave')
    for token in atokensv1:
        underlying_asset = ATOKENV1_TO_ASSET[token]
        if underlying_asset == A_ETH:
            assert asset_to_aave_reserve_address(underlying_asset) == AAVE_ETH_RESERVE_ADDRESS
            continue

        assert aave_reserve_address_to_reserve_asset(underlying_asset.ethereum_address) == underlying_asset  # noqa: E501
        assert asset_to_aave_reserve_address(underlying_asset) == underlying_asset.ethereum_address
Example #23
    def create_cache(
            self,
            from_asset: Asset,
            to_asset: Asset,
            purge_old: bool,
    ) -> None:
        """Creates the cache of the given asset pair from the start of time
        until now

        If purge_old is True then any old cache in memory and on disk is purged

        May raise:
            - RemoteError if there is a problem reaching cryptocompare
            - UnsupportedAsset if any of the two assets is not supported by cryptocompare
        """
        now = ts_now()

        # If we got cached data for up to 1 hour ago there is no point doing anything
        data_range = GlobalDBHandler().get_historical_price_range(
            from_asset=from_asset,
            to_asset=to_asset,
            source=HistoricalPriceOracle.CRYPTOCOMPARE,
        )
        if data_range and now - data_range[1] < 3600 and not purge_old:
            log.debug(
                'Did not create new cache since we got cache until 1 hour ago',
                from_asset=from_asset,
                to_asset=to_asset,
            )
            return

        if purge_old:
            GlobalDBHandler().delete_historical_prices(
                from_asset=from_asset,
                to_asset=to_asset,
                source=HistoricalPriceOracle.CRYPTOCOMPARE,
            )
        self.query_and_store_historical_data(
            from_asset=from_asset,
            to_asset=to_asset,
            timestamp=now,
        )
Example #24
def asset_to_atoken(asset: Asset, version: int) -> Optional[EthereumToken]:
    if asset == A_ETH:
        return A_AETH_V1

    protocol = 'aave' if version == 1 else 'aave-v2'
    cursor = GlobalDBHandler().conn.cursor()
    result = cursor.execute(
        'SELECT A.address from ethereum_tokens as A LEFT OUTER JOIN assets as B '
        'WHERE A.protocol==? AND A.address=B.details_reference AND B.symbol=?',
        (protocol, 'a' + asset.symbol),
    ).fetchall()
    if len(result) != 1:
        log.error(f'Could not derive atoken from {asset} since multiple or no results were returned')  # noqa: E501
        return None

    try:
        return EthereumToken(result[0][0])
    except UnknownAsset:  # should not happen
        log.error(f'Could not derive atoken from {asset}. Could not turn {result[0]} to EthereumToken')  # noqa: E501
        return None
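This helper is the inverse of atoken_to_asset from Example #17; a round-trip sketch using the A_DAI constant seen in the next example:

# Sketch: DAI -> aDAI (v2) -> DAI round trip; both helpers return None on
# ambiguous lookups, hence the guard.
atoken = asset_to_atoken(A_DAI, version=2)
if atoken is not None:
    assert atoken_to_asset(atoken) == A_DAI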
Example #25
def get_underlying_asset_price(token: EthereumToken) -> Optional[Price]:
    """Gets the underlying asset price for the given ethereum token

    TODO: This should be eventually pulled from the assets DB. All of these
    need to be updated, to contain proper protocol, and underlying assets.

    This function is neither in inquirer.py or chain/ethereum/defi.py
    due to recursive import problems
    """
    price = None
    if token.protocol == UNISWAP_PROTOCOL:
        price = Inquirer().find_uniswap_v2_lp_price(token)
    elif token.protocol == CURVE_POOL_PROTOCOL:
        price = Inquirer().find_curve_pool_price(token)
    elif token.protocol == YEARN_VAULTS_V2_PROTOCOL:
        price = Inquirer().find_yearn_price(token)

    if token == A_YV1_ALINK:
        price = Inquirer().find_usd_price(A_ALINK_V1)
    elif token == A_YV1_GUSD:
        price = Inquirer().find_usd_price(A_GUSD)
    elif token in (A_YV1_DAI, A_FARM_DAI):
        price = Inquirer().find_usd_price(A_DAI)
    elif token in (A_FARM_WETH, A_YV1_WETH):
        price = Inquirer().find_usd_price(A_ETH)
    elif token == A_YV1_YFI:
        price = Inquirer().find_usd_price(A_YFI)
    elif token in (A_FARM_USDT, A_YV1_USDT):
        price = Inquirer().find_usd_price(A_USDT)
    elif token in (A_FARM_USDC, A_YV1_USDC):
        price = Inquirer().find_usd_price(A_USDC)
    elif token in (A_FARM_TUSD, A_YV1_TUSD):
        price = Inquirer().find_usd_price(A_TUSD)
    elif token in ASSETS_UNDERLYING_BTC:
        price = Inquirer().find_usd_price(A_BTC)

    # At this point we have to return the price if it's not None. If we don't do this and we
    # got a price for a token that has underlying assets, the code would enter the if statement
    # after this block and the value of price would change, becoming incorrect.
    if price is not None:
        return price

    custom_token = GlobalDBHandler().get_ethereum_token(token.ethereum_address)
    if custom_token and custom_token.underlying_tokens is not None:
        usd_price = ZERO
        for underlying_token in custom_token.underlying_tokens:
            token = EthereumToken(underlying_token.address)
            usd_price += Inquirer().find_usd_price(
                token) * underlying_token.weight
        if usd_price != Price(ZERO):
            price = Price(usd_price)

    return price
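The final branch is a weighted sum over the underlying tokens. A standalone numeric sketch of that arithmetic (values made up):

# Illustrative only: a token backed 50/50 by assets priced at $1 and $3
# gets a composite price of $2.
usd_price = FVal('1.00') * FVal('0.5') + FVal('3.00') * FVal('0.5')
assert usd_price == FVal('2.00')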
Example #26
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Queries/detects token balances for a list of addresses

        If an address's tokens were recently autodetected they are not detected again;
        the balances are simply queried, unless force_detection is True.

        Returns the token balances of each address and the usd prices of the tokens
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        all_tokens = GlobalDBHandler().get_ethereum_tokens()
        # With etherscan with chunks > 120, we get request uri too large
        # so the limitation is not in the gas, but in the request uri length
        etherscan_chunks = list(
            get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        other_chunks = list(
            get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        now = ts_now()
        token_usd_price: Dict[EthereumToken, Price] = {}
        result = {}

        for address in addresses:
            saved_list = self.db.get_tokens_for_address_if_time(
                address=address, current_time=now)
            if force_detection or saved_list is None:
                balances = self.detect_tokens_for_address(
                    address=address,
                    token_usd_price=token_usd_price,
                    etherscan_chunks=etherscan_chunks,
                    other_chunks=other_chunks,
                )
            else:
                if len(saved_list) == 0:
                    continue  # Do not query if we know the address has no tokens

                balances = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=address,
                    tokens=[x.to_custom_ethereum_token() for x in saved_list],
                    balances=balances,
                    token_usd_price=token_usd_price,
                    call_order=None,  # use defaults
                )

            result[address] = balances

        return result, token_usd_price
Example #27
    def __init__(
            self,
            ethereum_manager: 'EthereumManager',  # pylint: disable=unused-argument
            base_tools: 'BaseDecoderTools',  # pylint: disable=unused-argument
            msg_aggregator: 'MessagesAggregator',  # pylint: disable=unused-argument
    ) -> None:
        super().__init__(
            ethereum_manager=ethereum_manager,
            base_tools=base_tools,
            msg_aggregator=msg_aggregator,
        )
        jars = GlobalDBHandler().get_ethereum_tokens(protocol=PICKLE_JAR_PROTOCOL)
        self.pickle_contracts = {jar.ethereum_address for jar in jars}
Example #28
def export_assets_from_file(
    dirpath: Optional[Path],
    db_handler: 'DBHandler',
) -> Path:
    """
    Creates a zip file with a json file containing the assets added by the user.
    May raise:
    - PermissionError if the temp file can't be created
    """
    if dirpath is None:
        dirpath = Path(tempfile.TemporaryDirectory().name)
        dirpath.mkdir(parents=True, exist_ok=True)

    assets = GlobalDBHandler().get_user_added_assets(user_db=db_handler)
    serialized = []
    for asset_identifier in assets:
        try:
            asset = Asset(asset_identifier)
            serialized.append(asset.to_dict())
        except UnknownAsset as e:
            log.error(e)

    cursor = GlobalDBHandler().conn.cursor()
    query = cursor.execute('SELECT value from settings WHERE name="version";')
    version = query.fetchone()[0]
    data = {
        'version': version,
        'assets': serialized,
    }

    zip_path = dirpath / 'assets.zip'
    with ZipFile(file=zip_path, mode='w', compression=ZIP_DEFLATED) as assets_zip:
        assets_zip.writestr(
            zinfo_or_arcname='assets.json',
            data=json.dumps(data),
        )

    return zip_path
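A caller sketch: passing dirpath=None exercises the temp-directory branch; db is assumed to be an open DBHandler instance.

# Sketch: export the user's custom assets into <tempdir>/assets.zip.
zip_path = export_assets_from_file(dirpath=None, db_handler=db)
print(f'wrote {zip_path}')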
Example #29
def query_binance_exchange_pairs(location: Location) -> Dict[str, BinancePair]:
    """Query all the binance pairs for a valid binance location (binance or binanceus).
    This function first tries to update the list of known pairs and store them in the database.
    If that fails, it returns the information already available in the database.

    May raise:
    - InputError when adding the pairs to the database fails
    """
    db = GlobalDBHandler()
    last_pair_check_ts = Timestamp(
        db.get_setting_value(f'binance_pairs_queried_at_{location}', 0),
    )
    gdb_binance = GlobalDBBinance(db)

    assert location in (Location.BINANCE, Location.BINANCEUS), f'Invalid location used as argument for binance pair query. {location}'  # noqa: E501
    if location == Location.BINANCE:
        url = 'https://api.binance.com/api/v3/exchangeInfo'
    elif location == Location.BINANCEUS:
        url = 'https://api.binance.us/api/v3/exchangeInfo'

    if ts_now() - last_pair_check_ts > DAY_IN_SECONDS:
        try:
            data = requests.get(url)
            pairs = create_binance_symbols_to_pair(
                exchange_data=data.json(),
                location=location,
            )
        except (JSONDecodeError, requests.exceptions.RequestException) as e:
            log.debug(f'Failed to obtain market pairs from binance. {str(e)}')
            # If request fails try to get them from the database
            database_pairs = gdb_binance.get_all_binance_pairs(location)
            return {pair.symbol: pair for pair in database_pairs}
        gdb_binance.save_all_binance_pairs(new_pairs=pairs.values(), location=location)
    else:
        database_pairs = gdb_binance.get_all_binance_pairs(location)
        pairs = {pair.symbol: pair for pair in database_pairs}
    return pairs
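A usage sketch; the 'BTCUSDT' key is illustrative of binance's pair symbol format.

# Sketch: refresh (at most once a day) and look up a single pair.
pairs = query_binance_exchange_pairs(Location.BINANCE)
btc_usdt = pairs.get('BTCUSDT')  # None if binance does not list the pair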
Example #30
def get_asset_by_symbol(
        symbol: str,
        asset_type: Optional[AssetType] = None) -> Optional[Asset]:
    """Gets an asset by symbol from the DB.

    Returns None if no asset with that symbol is found, or if
    multiple assets share the same symbol.
    """
    if symbol == 'ETH':
        return A_ETH  # ETH can be ETH and ETH2 in the DB

    assets_data = GlobalDBHandler().get_assets_with_symbol(symbol, asset_type)
    if len(assets_data) != 1:
        return None

    return Asset(assets_data[0].identifier)
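A closing sketch for the symbol lookup; the asset_type filter narrows collisions between assets sharing a symbol.

# Sketch: a None result means the symbol is unknown or ambiguous.
asset = get_asset_by_symbol('DAI', asset_type=AssetType.ETHEREUM_TOKEN)
if asset is None:
    print('symbol lookup failed or was ambiguous')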