Exemplo n.º 1
0
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Queries/detects token balances for a list of addresses

        If an address's tokens were recently autodetected they are not detected again but the
        balances are simply queried. Unless force_detection is True.

        Returns the token balances of each address and the usd prices of the tokens
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        all_tokens = GlobalDBHandler().get_ethereum_tokens(exceptions=[
            # Ignore the veCRV balance in token query. It's already detected by
            # defi SDK as part of locked CRV in Vote Escrowed CRV. Which is the right way
            # to approach it as there is no way to assign a price to 1 veCRV. It
            # can be 1 CRV locked for 4 years or 4 CRV locked for 1 year etc.
            string_to_ethereum_address(
                '0x5f3b5DfEb7B28CDbD7FAba78963EE202a494e2A2'),
        ])
        # With etherscan with chunks > 120, we get request uri too large
        # so the limitation is not in the gas, but in the request uri length
        etherscan_chunks = list(
            get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        other_chunks = list(
            get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        now = ts_now()
        # Shared cache of token prices, filled in by the helpers called below
        # and returned to the caller alongside the balances
        token_usd_price: Dict[EthereumToken, Price] = {}
        result = {}

        for address in addresses:
            # presumably returns None when no recent-enough detection result
            # exists for the address — TODO confirm against DBHandler
            saved_list = self.db.get_tokens_for_address_if_time(
                address=address, current_time=now)
            if force_detection or saved_list is None:
                # Full detection pass: probe all known tokens for this address
                balances = self.detect_tokens_for_address(
                    address=address,
                    token_usd_price=token_usd_price,
                    etherscan_chunks=etherscan_chunks,
                    other_chunks=other_chunks,
                )
            else:
                if len(saved_list) == 0:
                    continue  # Do not query if we know the address has no tokens

                # Only query balances for the tokens already known for the address
                balances = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=address,
                    tokens=[x.to_custom_ethereum_token() for x in saved_list],
                    balances=balances,
                    token_usd_price=token_usd_price,
                    call_order=None,  # use defaults
                )

            result[address] = balances

        return result, token_usd_price
Exemplo n.º 2
0
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Query or detect ERC20 token balances for the given addresses.

        An address whose tokens were autodetected recently enough is not
        re-detected; only its known token balances are queried. Passing
        force_detection=True forces a fresh detection for every address.

        Returns a mapping of address to token balances, together with the USD
        prices discovered for the involved tokens.
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        tokens = GlobalDBHandler().get_ethereum_tokens()
        # Etherscan rejects requests whose URI is too long (happens above ~120
        # tokens per call), so the chunk size is URI-bound rather than gas-bound
        chunks_etherscan = list(
            get_chunks(tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        chunks_other = list(
            get_chunks(tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        query_time = ts_now()
        usd_prices: Dict[EthereumToken, Price] = {}
        address_balances = {}

        for account in addresses:
            known_tokens = self.db.get_tokens_for_address_if_time(
                address=account, current_time=query_time)
            if force_detection or known_tokens is None:
                # No recent detection result (or caller forced it): run detection
                found = self.detect_tokens_for_address(
                    address=account,
                    token_usd_price=usd_prices,
                    etherscan_chunks=chunks_etherscan,
                    other_chunks=chunks_other,
                )
            else:
                if len(known_tokens) == 0:
                    # Address is known to hold no tokens -- skip querying entirely
                    continue

                found = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=account,
                    tokens=[x.to_custom_ethereum_token() for x in known_tokens],
                    balances=found,
                    token_usd_price=usd_prices,
                    call_order=None,  # use defaults
                )

            address_balances[account] = found

        return address_balances, usd_prices
Exemplo n.º 3
0
def pairs_from_ethereum(ethereum: EthereumManager) -> List[ChecksumEthAddress]:
    """Detect the uniswap v2 pool tokens by using an ethereum node

    Returns the list of all uniswap v2 pair contract addresses queried from
    the factory contract.

    Exits the process with an error message if a returned address fails to
    deserialize. (NOTE: the original return annotation was ``Dict[str, Any]``
    but the function has always returned a list of addresses.)
    """
    contracts_file = Path(__file__).resolve().parent / 'contracts.json'
    with contracts_file.open('r') as f:
        contracts = json.loads(f.read())

    univ2factory = EthereumContract(
        address=contracts['UNISWAPV2FACTORY']['address'],
        abi=contracts['UNISWAPV2FACTORY']['abi'],
        deployed_block=0,  # whatever
    )
    # Total number of pairs ever created by the factory
    pairs_num = univ2factory.call(ethereum, 'allPairsLength')
    # Each multicall argument is a single-element list: [pair_index]
    chunks = list(get_chunks([[x] for x in range(pairs_num)], n=500))
    pairs = []
    for idx, chunk in enumerate(chunks):
        print(f'Querying univ2 pairs chunk {idx + 1} / {len(chunks)}')
        result = multicall_specific(ethereum, univ2factory, 'allPairs', chunk)
        try:
            pairs.extend([deserialize_ethereum_address(x[0]) for x in result])
        except DeserializationError:
            print(
                'Error deserializing address while fetching uniswap v2 pool tokens'
            )
            sys.exit(1)

    return pairs
Exemplo n.º 4
0
def uniswap_lp_token_balances(
    userdb: 'DBHandler',
    address: ChecksumEthAddress,
    ethereum: 'EthereumManager',
    lp_addresses: List[ChecksumEthAddress],
    known_assets: Set[EthereumToken],
    unknown_assets: Set[EthereumToken],
) -> List[LiquidityPool]:
    """Query uniswap token balances from ethereum chain

    The number of addresses to query in one call depends a lot on the node used.
    With an infura node we saw the following:
    500 addresses per call took on average 43 seconds for 20450 addresses
    2000 addresses per call took on average 36 seconds for 20450 addresses
    4000 addresses per call took on average 32.6 seconds for 20450 addresses
    5000 addresses timed out a few times
    """
    adapter = EthereumContract(
        address=ZERION_ADAPTER_ADDRESS,
        abi=ZERION_ABI,
        deployed_block=1586199170,
    )
    # An own node can handle much bigger batches than open/public nodes
    if NodeName.OWN in ethereum.web3_mapping:
        address_chunks = list(get_chunks(lp_addresses, n=4000))
        order = [NodeName.OWN]
    else:
        address_chunks = list(get_chunks(lp_addresses, n=700))
        order = ethereum.default_call_order(skip_etherscan=True)

    pools: List[LiquidityPool] = []
    for batch in address_chunks:
        outcome = adapter.call(
            ethereum=ethereum,
            method_name='getAdapterBalance',
            arguments=[
                address, '0x4EdBac5c8cb92878DD3fd165e43bBb8472f34c3f', batch
            ],
            call_order=order,
        )

        pools.extend(
            _decode_result(userdb, item, known_assets, unknown_assets)
            for item in outcome[1]
        )

    return pools
Exemplo n.º 5
0
    def get_balance_history(
            self, validator_indices: List[int]) -> Dict[int, ValidatorBalance]:
        """Get the balance history of all the validators given from the indices list

        https://beaconcha.in/api/v1/docs/index.html#/Validator/get_api_v1_validator__indexOrPubkey__balancehistory

        Queries in chunks of 100 due to api limitations.

        NOTICE: Do not use yet. The results seem incosistent. The list can accept
        up to 100 validators, but the balance history is for the last 100 epochs
        of each validator, limited to 100 results. So it's not really useful.

        Their devs said they will have a look as this may not be desired behaviour.

        May raise:
        - RemoteError due to problems querying beaconcha.in API
        """
        entries = []
        for batch in list(get_chunks(validator_indices, n=100)):
            response = self._query(
                module='validator',
                endpoint='balancehistory',
                encoded_args=','.join(str(x) for x in batch),
            )
            if isinstance(response, list):
                entries.extend(response)
            else:
                entries.append(response)

        # Keep only the entry with the highest (latest) epoch per validator
        balances: Dict[int, ValidatorBalance] = {}
        try:
            for entry in entries:
                validator_idx = entry['validatorindex']
                entry_epoch = entry['epoch']
                existing = balances.get(validator_idx)
                if existing is not None and existing.epoch >= entry_epoch:
                    continue

                balances[validator_idx] = ValidatorBalance(
                    epoch=entry_epoch,
                    balance=entry['balance'],
                    effective_balance=entry['effectivebalance'],
                )
        except KeyError as e:
            raise RemoteError(
                f'Beaconchai.in balance response processing error. Missing key entry {str(e)}',
            ) from e

        return balances
Exemplo n.º 6
0
def _calculate_query_chunks(
    indices_or_pubkeys: Union[List[int], List[Eth2PubKey]],
) -> Union[List[List[int]], List[List[Eth2PubKey]]]:
    """Split the given validator identifiers into query-sized chunks.

    Beaconcha.in allows up to 100 validator or public keys in one query for most calls.
    Also has a URI length limit of ~8190, so seems no more than 80 public keys can be per call.
    """
    if not indices_or_pubkeys:
        return []  # type: ignore

    # Public keys are longer, so fewer of them fit in the URI length limit
    chunk_size = 80 if isinstance(indices_or_pubkeys[0], str) else 100
    return list(get_chunks(indices_or_pubkeys, n=chunk_size))  # type: ignore
Exemplo n.º 7
0
    def get_performance(
        self,
        indices_or_pubkeys: Union[List[int], List[str]],
    ) -> Union[Dict[int, ValidatorPerformance], Dict[str,
                                                     ValidatorPerformance]]:
        """Get the performance of all the validators given from the list of indices or pubkeys

        Queries in chunks of 100 due to api limitations

        May raise:
        - RemoteError due to problems querying beaconcha.in API
        """
        entries = []
        for batch in list(get_chunks(indices_or_pubkeys, n=100)):  # type: ignore
            response = self._query(
                module='validator',
                endpoint='performance',
                encoded_args=','.join(str(item) for item in batch),
            )
            if isinstance(response, list):
                entries.extend(response)
            else:
                entries.append(response)

        try:
            # Build the mapping in one pass; a missing key anywhere aborts
            performance = {
                entry['validatorindex']: ValidatorPerformance(
                    balance=entry['balance'],
                    performance_1d=entry['performance1d'],
                    performance_1w=entry['performance7d'],
                    performance_1m=entry['performance31d'],
                    performance_1y=entry['performance365d'],
                )
                for entry in entries
            }
        except KeyError as e:
            raise RemoteError(
                f'Beaconchai.in performance response processing error. Missing key entry {str(e)}',
            ) from e

        return performance
Exemplo n.º 8
0
    def get_balance(
            self, validator_indices: List[int]) -> Dict[int, ValidatorBalance]:
        """Get the balance of all the validators given from the indices list

        Queries in chunks of 100 due to api limitations

        May raise:
        - RemoteError due to problems querying beaconcha.in API
        """
        chunks = list(get_chunks(validator_indices, n=100))
        data = []
        for chunk in chunks:
            result = self._query(
                module='validator',
                endpoint='balancehistory',
                encoded_args=','.join(str(x) for x in chunk),
            )
            if isinstance(result, list):
                data.extend(result)
            else:
                data.append(result)

        # We are only interested in last epoch, so get its value
        balances: Dict[int, ValidatorBalance] = {}
        try:
            for entry in data:
                index = entry['validatorindex']
                epoch = entry['epoch']
                if index in balances and balances[index].epoch >= epoch:
                    continue

                balances[index] = ValidatorBalance(
                    epoch=epoch,
                    balance=entry['balance'],
                    effective_balance=entry['effectivebalance'],
                )
        except KeyError as e:
            # Chain the original KeyError (`from e`) so the root cause is
            # preserved in tracebacks -- consistent with get_balance_history
            raise RemoteError(
                f'Beaconchai.in balance response processing error. Missing key entry {str(e)}',
            ) from e

        return balances
Exemplo n.º 9
0
    def _query_chain_for_all_balances(self,
                                      account: ChecksumEthAddress) -> List:
        """Query all protocol balances of the account via the zerion sdk contract.

        Tries a single getBalances call when our own node is available, and
        otherwise falls back to batched getProtocolBalances calls.

        Returns the raw contract call results as a list.
        """
        if NodeName.OWN in self.ethereum.web3_mapping:
            try:
                # In this case we don't care about the gas limit
                return self.contract.call(
                    ethereum=self.ethereum,
                    method_name='getBalances',
                    arguments=[account],
                    call_order=[NodeName.OWN, NodeName.ONEINCH],
                )
            except RemoteError:
                log.warning(
                    'Failed to query zerionsdk balances with own node. Falling '
                    'back to multiple calls to getProtocolBalances', )

        # but if we are not connected to our own node the zerion sdk get balances call
        # has unfortunately crossed the default limits of almost all open nodes apart from 1inch
        # https://github.com/rotki/rotki/issues/1969
        # So now we get all supported protocols and query in batches
        protocol_names = self._get_protocol_names()
        result = []
        protocol_chunks: List[List[str]] = list(
            get_chunks(
                list(protocol_names),
                n=PROTOCOLS_QUERY_NUM,
            ))
        # Use a distinct loop variable; the original shadowed `protocol_names`
        for protocol_batch in protocol_chunks:
            contract_result = self.contract.call(
                ethereum=self.ethereum,
                method_name='getProtocolBalances',
                arguments=[account, protocol_batch],
            )
            if len(contract_result) == 0:
                continue
            result.extend(contract_result)

        return result
Exemplo n.º 10
0
    def query_tokens_for_addresses(
        self,
        addresses: List[ChecksumEthAddress],
        force_detection: bool,
    ) -> TokensReturn:
        """Queries/detects token balances for a list of addresses

        If an address's tokens were recently autodetected they are not detected again but the
        balances are simply queried. Unless force_detection is True.

        Returns the token balances of each address and the usd prices of the tokens
        """
        log.debug(
            'Querying/detecting token balances for all addresses',
            force_detection=force_detection,
        )
        ignored_assets = self.db.get_ignored_assets()
        # Addresses of tokens that must never be queried here
        exceptions = [
            # Ignore the veCRV balance in token query. It's already detected by
            # defi SDK as part of locked CRV in Vote Escrowed CRV. Which is the right way
            # to approach it as there is no way to assign a price to 1 veCRV. It
            # can be 1 CRV locked for 4 years or 4 CRV locked for 1 year etc.
            string_to_ethereum_address(
                '0x5f3b5DfEb7B28CDbD7FAba78963EE202a494e2A2'),
            # Ignore for now xsushi since is queried by defi SDK. We'll do it for now
            # since the SDK entry might return other tokens from sushi and we don't
            # fully support sushi now.
            string_to_ethereum_address(
                '0x8798249c2E607446EfB7Ad49eC89dD1865Ff4272'),
            # Ignore the following tokens. They are old tokens of upgraded contracts which
            # duplicated the balances at upgrade instead of doing a token swap.
            # e.g.: https://github.com/rotki/rotki/issues/3548
            # TODO: At some point we should actually remove them from the DB and
            # upgrade possible occurences in the user DB
            #
            # Old contract of Fetch.ai
            string_to_ethereum_address(
                '0x1D287CC25dAD7cCaF76a26bc660c5F7C8E2a05BD'),
        ]
        for asset in ignored_assets:  # don't query for the ignored tokens
            if asset.is_eth_token(
            ):  # type: ignore -- at this point we know asset is a token
                exceptions.append(
                    EthereumToken.from_asset(
                        asset).ethereum_address)  # type: ignore
        all_tokens = GlobalDBHandler().get_ethereum_tokens(
            exceptions=exceptions,
            except_protocols=['balancer'],
        )
        # With etherscan with chunks > 120, we get request uri too large
        # so the limitation is not in the gas, but in the request uri length
        etherscan_chunks = list(
            get_chunks(all_tokens, n=ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH))
        other_chunks = list(
            get_chunks(all_tokens, n=OTHER_MAX_TOKEN_CHUNK_LENGTH))
        now = ts_now()
        # Shared cache of token prices, filled in by the helpers called below
        # and returned to the caller alongside the balances
        token_usd_price: Dict[EthereumToken, Price] = {}
        result = {}

        for address in addresses:
            # presumably returns None when no recent-enough detection result
            # exists for the address — TODO confirm against DBHandler
            saved_list = self.db.get_tokens_for_address_if_time(
                address=address, current_time=now)
            if force_detection or saved_list is None:
                # Full detection pass: probe all (non-excepted) tokens
                balances = self.detect_tokens_for_address(
                    address=address,
                    token_usd_price=token_usd_price,
                    etherscan_chunks=etherscan_chunks,
                    other_chunks=other_chunks,
                )
            else:
                if len(saved_list) == 0:
                    continue  # Do not query if we know the address has no tokens

                # Only query balances for the tokens already known for the address
                balances = defaultdict(FVal)
                self._get_tokens_balance_and_price(
                    address=address,
                    tokens=saved_list,
                    balances=balances,
                    token_usd_price=token_usd_price,
                    call_order=None,  # use defaults
                )

            result[address] = balances

        return result, token_usd_price