Example #1
    def _maybe_decode_erc20_approve(
            self,
            token: Optional[EthereumToken],
            tx_log: EthereumTxReceiptLog,
            transaction: EthereumTransaction,
            decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
            action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Optional[HistoryBaseEntry]:
        if tx_log.topics[0] != ERC20_APPROVE or token is None:
            return None

        owner_address = hex_or_bytes_to_address(tx_log.topics[1])
        spender_address = hex_or_bytes_to_address(tx_log.topics[2])

        if not any(
                self.base.is_tracked(x)
                for x in (owner_address, spender_address)):
            return None

        amount_raw = hex_or_bytes_to_int(tx_log.data)
        amount = token_normalized_value(token_amount=amount_raw, token=token)
        notes = f'Approve {amount} {token.symbol} of {owner_address} for spending by {spender_address}'  # noqa: E501
        return HistoryBaseEntry(
            event_identifier=transaction.tx_hash.hex(),
            sequence_index=self.base.get_sequence_index(tx_log),
            timestamp=ts_sec_to_ms(transaction.timestamp),
            location=Location.BLOCKCHAIN,
            location_label=owner_address,
            asset=token,
            balance=Balance(amount=amount),
            notes=notes,
            event_type=HistoryEventType.INFORMATIONAL,
            event_subtype=HistoryEventSubType.APPROVE,
            counterparty=spender_address,
        )
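Note: every example on this page funnels a raw on-chain integer amount through token_normalized_value (or a decimals-based variant in examples #3 and #11). The helper itself is not shown here; a minimal standalone sketch of what it does, using Decimal in place of rotki's FVal, could look like this:

from decimal import Decimal
from typing import Optional


def token_normalized_value_decimals(token_amount: int, token_decimals: Optional[int]) -> Decimal:
    """Scale a raw ERC-20 integer amount down by the token's decimals."""
    decimals = 18 if token_decimals is None else token_decimals  # assumption: fall back to 18 when unknown
    return Decimal(token_amount) / (Decimal(10) ** decimals)


def token_normalized_value(token_amount: int, token) -> Decimal:
    """Convenience wrapper that reads the decimals off a token object."""
    return token_normalized_value_decimals(token_amount, token.decimals)


# For example: token_normalized_value_decimals(5 * 10**18, 18) == Decimal('5')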
Example #2
File: tokens.py  Project: zalam003/rotki
    def _get_multitoken_account_balance(
        self,
        tokens: List[CustomEthereumTokenWithIdentifier],
        account: ChecksumEthAddress,
        call_order: Optional[Sequence[NodeName]],
    ) -> Dict[str, FVal]:
        """Queries balances of multiple tokens for an account

        Return a dictionary mapping each token's identifier to the account's
        balance of that token. Only non-zero balances are included.

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - BadFunctionCallOutput if a local node is used and the contract for the
          token has no code. That means the chain is not synced
        """
        log.debug(
            'Querying ethereum chain for multi token account balances',
            eth_address=account,
            tokens_num=len(tokens),
        )
        balances: Dict[str, FVal] = {}
        result = ETH_SCAN.call(
            ethereum=self.ethereum,
            method_name='tokensBalance',
            arguments=[account, [x.address for x in tokens]],
            call_order=call_order,
        )
        for tk_idx, token in enumerate(tokens):
            token_amount = result[tk_idx]
            if token_amount != 0:
                balances[token.identifier] = token_normalized_value(
                    token_amount, token)
        return balances
Example #3
File: zerion.py  Project: toro09/rotki
    def _get_single_balance(
            self, entry: Tuple[Tuple[str, str, str, int], int]) -> DefiBalance:
        metadata = entry[0]
        balance_value = entry[1]
        decimals = metadata[3]
        normalized_value = token_normalized_value(balance_value, decimals)
        token_symbol = metadata[2]

        try:
            asset = Asset(token_symbol)
            usd_price = Inquirer().find_usd_price(asset)
        except (UnknownAsset, UnsupportedAsset):
            if not _is_symbol_non_standard(token_symbol):
                self.msg_aggregator.add_error(
                    f'Unsupported asset {token_symbol} encountered during DeFi protocol queries',
                )
            usd_price = Price(ZERO)

        usd_value = normalized_value * usd_price
        defi_balance = DefiBalance(
            token_address=to_checksum_address(metadata[0]),
            token_name=metadata[1],
            token_symbol=token_symbol,
            balance=Balance(amount=normalized_value, usd_value=usd_value),
        )
        return defi_balance
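For reference, the entry argument unpacked above is a (metadata, raw_balance) pair, where the metadata tuple appears to be (token_address, token_name, token_symbol, decimals) judging by the indices used. A hypothetical DAI entry would be:

entry = (
    ('0x6B175474E89094C44Da98b954EedeAC495271d0F', 'Dai Stablecoin', 'DAI', 18),  # metadata
    5 * 10**18,  # raw balance, which normalizes to 5 DAI with 18 decimals
)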
Example #4
    def decode_makerdao_debt_payback(  # pylint: disable=no-self-use
        self,
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,  # pylint: disable=unused-argument
        decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
        all_logs: List[EthereumTxReceiptLog],  # pylint: disable=unused-argument
        action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        if tx_log.topics[0] == GENERIC_JOIN:
            join_user_address = hex_or_bytes_to_address(tx_log.topics[2])
            raw_amount = hex_or_bytes_to_int(tx_log.topics[3])
            amount = token_normalized_value(
                token_amount=raw_amount,
                token=A_DAI,
            )
            # The transfer comes right before, but we don't yet have enough information
            # to be sure this transfer is indeed a vault debt payback. We need to find
            # a cdp frob event and check that the vault id matches the address
            action_item = ActionItem(
                action='transform',
                sequence_index=tx_log.log_index,
                from_event_type=HistoryEventType.SPEND,
                from_event_subtype=HistoryEventSubType.NONE,
                asset=A_DAI,
                amount=amount,
                to_event_subtype=HistoryEventSubType.PAYBACK_DEBT,
                to_counterparty=CPT_VAULT,
                extra_data={'vault_address': join_user_address},
            )
            return None, action_item

        return None, None
Example #5
    def _decode_redeem(
            self,
            tx_log: EthereumTxReceiptLog,
            decoded_events: List[HistoryBaseEntry],
            compound_token: EthereumToken,
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        redeemer = hex_or_bytes_to_address(tx_log.data[0:32])
        if not self.base.is_tracked(redeemer):
            return None, None

        redeem_amount_raw = hex_or_bytes_to_int(tx_log.data[32:64])
        redeem_tokens_raw = hex_or_bytes_to_int(tx_log.data[64:96])
        underlying_token = symbol_to_asset_or_token(compound_token.symbol[1:])
        redeem_amount = asset_normalized_value(redeem_amount_raw, underlying_token)
        redeem_tokens = token_normalized_value(redeem_tokens_raw, compound_token)
        out_event = in_event = None
        for event in decoded_events:
            # Find the transfer event which should have come before the redeeming
            if event.event_type == HistoryEventType.RECEIVE and event.asset == underlying_token and event.balance.amount == redeem_amount:  # noqa: E501
                event.event_type = HistoryEventType.WITHDRAWAL
                event.event_subtype = HistoryEventSubType.REMOVE_ASSET
                event.counterparty = CPT_COMPOUND
                event.notes = f'Withdraw {redeem_amount} {underlying_token.symbol} from compound'
                in_event = event
            if event.event_type == HistoryEventType.SPEND and event.asset == compound_token and event.balance.amount == redeem_tokens:  # noqa: E501
                event.event_type = HistoryEventType.SPEND
                event.event_subtype = HistoryEventSubType.RETURN_WRAPPED
                event.counterparty = CPT_COMPOUND
                event.notes = f'Return {redeem_tokens} {compound_token.symbol} to compound'
                out_event = event

        maybe_reshuffle_events(out_event=out_event, in_event=in_event, events_list=decoded_events)
        return None, None
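Note that the underlying asset is resolved simply by stripping the leading 'c' from the cToken symbol before the lookup; the mint decoder in example #7 does the same. A quick illustration (symbol value representative, not taken from the code above):

symbol = 'cDAI'          # compound_token.symbol
underlying = symbol[1:]  # 'DAI', which symbol_to_asset_or_token() then resolves to an asset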
Example #6
File: compound.py  Project: rotki/rotki
    def _get_comp_events(
        self,
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[CompoundEvent]:
        from_block = max(
            COMP_DEPLOYED_BLOCK,
            self.ethereum.get_blocknumber_by_time(from_ts),
        )
        argument_filters = {
            'from': COMPTROLLER_PROXY.address,
            'to': address,
        }
        comp_events = self.ethereum.get_logs(
            contract_address=A_COMP.ethereum_address,
            abi=ERC20TOKEN_ABI,
            event_name='Transfer',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=self.ethereum.get_blocknumber_by_time(to_ts),
        )

        events = []
        for event in comp_events:
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            amount = token_normalized_value(hexstr_to_int(event['data']),
                                            A_COMP)
            usd_price = query_usd_price_zero_if_error(
                asset=A_COMP,
                time=timestamp,
                location=f'comp_claim {tx_hash}',
                msg_aggregator=self.msg_aggregator,
            )
            value = Balance(amount, amount * usd_price)
            events.append(
                CompoundEvent(
                    event_type='comp',
                    address=address,
                    block_number=event['blockNumber'],
                    timestamp=timestamp,
                    asset=A_COMP,
                    value=value,
                    to_asset=None,
                    to_value=None,
                    realized_pnl=value,
                    tx_hash=tx_hash,
                    log_index=event['logIndex'],
                ))

        return events
Example #7
    def _decode_mint(
            self,
            transaction: EthereumTransaction,
            tx_log: EthereumTxReceiptLog,
            decoded_events: List[HistoryBaseEntry],
            compound_token: EthereumToken,
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        minter = hex_or_bytes_to_address(tx_log.data[0:32])
        if not self.base.is_tracked(minter):
            return None, None

        mint_amount_raw = hex_or_bytes_to_int(tx_log.data[32:64])
        minted_amount_raw = hex_or_bytes_to_int(tx_log.data[64:96])
        underlying_asset = symbol_to_asset_or_token(compound_token.symbol[1:])
        mint_amount = asset_normalized_value(mint_amount_raw, underlying_asset)
        minted_amount = token_normalized_value(minted_amount_raw, compound_token)
        out_event = None
        for event in decoded_events:
            # Find the transfer event which should have come before the minting
            if event.event_type == HistoryEventType.SPEND and event.asset == underlying_asset and event.balance.amount == mint_amount:  # noqa: E501
                event.event_type = HistoryEventType.DEPOSIT
                event.event_subtype = HistoryEventSubType.DEPOSIT_ASSET
                event.counterparty = CPT_COMPOUND
                event.notes = f'Deposit {mint_amount} {underlying_asset.symbol} to compound'
                out_event = event
                break

        if out_event is None:
            log.debug(f'At compound mint decoding of tx {transaction.tx_hash.hex()} the out event was not found')  # noqa: E501
            return None, None

        # also create an action item for the receive of the cTokens
        action_item = ActionItem(
            action='transform',
            sequence_index=tx_log.log_index,
            from_event_type=HistoryEventType.RECEIVE,
            from_event_subtype=HistoryEventSubType.NONE,
            asset=compound_token,
            amount=minted_amount,
            to_event_subtype=HistoryEventSubType.RECEIVE_WRAPPED,
            to_notes=f'Receive {minted_amount} {compound_token.symbol} from compound',
            to_counterparty=CPT_COMPOUND,
            paired_event_data=(out_event, True),
        )
        return None, action_item
Example #8
def test_detected_tokens_cache(ethtokens, inquirer):  # pylint: disable=unused-argument
    """Test that a cache of the detected tokens is created and used at subsequent queries.

    Also test that the cache can be ignored and recreated with a forced redetection
    """
    addr1 = make_ethereum_address()
    addr2 = make_ethereum_address()
    eth_map = {addr1: {A_GNO: 5000, A_MKR: 4000}, addr2: {A_MKR: 6000}}
    etherscan_patch = mock_etherscan_query(
        eth_map=eth_map,
        etherscan=ethtokens.ethereum.etherscan,
        original_queries=None,
        original_requests_get=requests.get,
        extra_flags=None,
    )
    ethtokens_max_chunks_patch = patch(
        'rotkehlchen.chain.ethereum.tokens.ETHERSCAN_MAX_TOKEN_CHUNK_LENGTH',
        new=800,
    )

    with ethtokens_max_chunks_patch, etherscan_patch as etherscan_mock:
        # Autodetect the tokens in the first call
        result1, _ = ethtokens.query_tokens_for_addresses([addr1, addr2],
                                                          False)
        initial_call_count = etherscan_mock.call_count

        # In the second call the tokens should not be re-detected; the DB cache should be used
        result2, _ = ethtokens.query_tokens_for_addresses([addr1, addr2],
                                                          False)
        call_count = etherscan_mock.call_count
        assert call_count == initial_call_count + 2

        # In the third call force re-detection
        result3, _ = ethtokens.query_tokens_for_addresses([addr1, addr2], True)
        call_count = etherscan_mock.call_count
        assert call_count == initial_call_count + 2 + initial_call_count

        assert result1 == result2 == result3
        assert len(result1) == len(eth_map)
        for key, entry in result1.items():
            eth_map_entry = eth_map[key]
            assert len(entry) == len(eth_map_entry)
            for token, val in entry.items():
                assert token_normalized_value(eth_map_entry[token],
                                              token) == val
Example #9
    def _decode_vault_debt_generation(
        self,
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,  # pylint: disable=unused-argument
        decoded_events: List[HistoryBaseEntry],  # pylint: disable=unused-argument
        all_logs: List[EthereumTxReceiptLog],  # pylint: disable=unused-argument
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        """Decode vault debt generation by parsing a lognote for cdpmanager move"""
        cdp_id = hex_or_bytes_to_int(tx_log.topics[2])
        destination = hex_or_bytes_to_address(tx_log.topics[3])

        owner = self._get_address_or_proxy(destination)
        if owner is None:
            return None, None

        # Now we need to get the rad. Since it's the 3rd argument it's not in the indexed topics
        # but in the data, after the first 132 bytes.
        # We also need to shift right by RAY_DIGITS since the value is denominated in rad
        raw_amount = shift_num_right_by(
            hex_or_bytes_to_int(tx_log.data[132:164]), RAY_DIGITS)
        amount = token_normalized_value(
            token_amount=raw_amount,
            token=A_DAI,
        )

        # The transfer event appears after the debt generation event, so we need to transform it
        action_item = ActionItem(
            action='transform',
            sequence_index=tx_log.log_index,
            from_event_type=HistoryEventType.RECEIVE,
            from_event_subtype=HistoryEventSubType.NONE,
            asset=A_DAI,
            amount=amount,
            to_event_type=HistoryEventType.WITHDRAWAL,
            to_event_subtype=HistoryEventSubType.GENERATE_DEBT,
            to_counterparty=CPT_VAULT,
            to_notes=f'Generate {amount} DAI from MakerDAO vault {cdp_id}',
            extra_data={'cdp_id': cdp_id},
        )
        return None, action_item
Example #10
    def _get_multitoken_multiaccount_balance(
        self,
        tokens: List[EthTokenInfo],
        accounts: List[ChecksumEthAddress],
    ) -> Dict[str, Dict[ChecksumEthAddress, FVal]]:
        """Queries a list of accounts for balances of multiple tokens

        Return a dictionary with keys being tokens and value a dictionary of
        account to balances

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - BadFunctionCallOutput if a local node is used and the contract for the
          token has no code. That means the chain is not synced
        """
        log.debug(
            'Querying ethereum chain for multi token multi account balances',
            eth_addresses=accounts,
            tokens_num=len(tokens),
        )
        balances: Dict[str, Dict[ChecksumEthAddress, FVal]] = defaultdict(dict)
        result = ETH_SCAN.call(
            ethereum=self.ethereum,
            method_name='tokensBalances',
            arguments=[accounts, [x.address for x in tokens]],
        )
        for acc_idx, account in enumerate(accounts):
            for tk_idx, token in enumerate(tokens):
                token_amount = result[acc_idx][tk_idx]
                if token_amount != 0:
                    balances[token.identifier][account] = token_normalized_value(
                        token_amount=token_amount,
                        token=token,
                    )
        return balances
Example #11
    def get_multiaccount_token_balance(
        self,
        token: EthereumToken,
        accounts: List[ChecksumEthAddress],
    ) -> Dict[ChecksumEthAddress, FVal]:
        """Queries a list of accounts for balances of a single token

        Return a dictionary with keys being accounts and value balances of token
        Balance value is normalized through the token decimals.

        May raise:
        - RemoteError if an external service such as Etherscan is queried and
          there is a problem with its query.
        - BadFunctionCallOutput if a local node is used and the contract for the
          token has no code. That means the chain is not synced
        """
        log.debug(
            'Querying ethereum chain for single token multi account balances',
            eth_addresses=accounts,
            token_address=token.ethereum_address,
            token_symbol=token.symbol,
        )
        balances = {}
        result = self.call_contract(
            contract_address=ETH_SCAN.address,
            abi=ETH_SCAN.abi,
            method_name='tokensBalances',
            arguments=[accounts, [token.ethereum_address]],
        )
        for idx, account in enumerate(accounts):
            # Index 0 since we only provided a single token above
            token_amount = result[idx][0]
            if token_amount != 0:
                balances[account] = token_normalized_value(
                    token_amount, token.decimals)
        return balances
Example #12
    def _query_vault_details(
        self,
        vault: MakerDAOVault,
        proxy: ChecksumEthAddress,
        urn: ChecksumEthAddress,
    ) -> Optional[MakerDAOVaultDetails]:
        # They can raise:
        # ConversionError due to hex_or_bytes_to_address, hexstr_to_int
        # RemoteError due to external query errors
        events = self.ethereum.get_logs(
            contract_address=MAKERDAO_CDP_MANAGER.address,
            abi=MAKERDAO_CDP_MANAGER.abi,
            event_name='NewCdp',
            argument_filters={'cdp': vault.identifier},
            from_block=MAKERDAO_CDP_MANAGER.deployed_block,
        )
        if len(events) == 0:
            self.msg_aggregator.add_error(
                'No events found for a Vault creation. This should never '
                'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
            )
            return None
        if len(events) != 1:
            log.error(
                f'Multiple events found for a Vault creation: {events}. Taking '
                f'only the first. This should not happen. Something is wrong',
            )
            self.msg_aggregator.add_error(
                'Multiple events found for a Vault creation. This should never '
                'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
            )
        creation_ts = self.ethereum.get_event_timestamp(events[0])

        # get vat frob events for cross-checking
        argument_filters = {
            'sig': '0x76088703',  # frob
            'arg1': '0x' + vault.ilk.hex(),  # ilk
            'arg2': address_to_bytes32(urn),  # urn
            # arg3 can be urn for the 1st deposit, and proxy/owner for the next ones
            # so don't filter for it
            # 'arg3': address_to_bytes32(proxy),  # proxy - owner
        }
        frob_events = self.ethereum.get_logs(
            contract_address=MAKERDAO_VAT.address,
            abi=MAKERDAO_VAT.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=MAKERDAO_VAT.deployed_block,
        )
        frob_event_tx_hashes = [x['transactionHash'] for x in frob_events]

        gemjoin = GEMJOIN_MAPPING.get(vault.collateral_type, None)
        if gemjoin is None:
            self.msg_aggregator.add_warning(
                f'Unknown makerdao vault collateral type detected {vault.collateral_type}. '
                'Skipping ...', )
            return None

        vault_events = []
        # Get the collateral deposit events
        argument_filters = {
            'sig': '0x3b4da69f',  # join
            # In cases where a CDP has been migrated from a SAI CDP to a DAI
            # Vault the usr in the first deposit will be the old address. To
            # detect the first deposit in these cases we need to check for
            # arg1 being the urn
            # 'usr': proxy,
            'arg1': address_to_bytes32(urn),
        }
        events = self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        )
        # All subsequent deposits should have the proxy as usr,
        # but for non-migrated CDPs the previous query also matches,
        # so in those cases the first deposit would appear twice
        argument_filters = {
            'sig': '0x3b4da69f',  # join
            'usr': proxy,
        }
        events.extend(
            self.ethereum.get_logs(
                contract_address=gemjoin.address,
                abi=gemjoin.abi,
                event_name='LogNote',
                argument_filters=argument_filters,
                from_block=gemjoin.deployed_block,
            ))
        deposit_tx_hashes = set()
        for event in events:
            tx_hash = event['transactionHash']
            if tx_hash in deposit_tx_hashes:
                # Skip duplicate deposit that would be detected in non migrated CDP case
                continue

            if tx_hash not in frob_event_tx_hashes:
                # If there is no corresponding frob event then skip
                continue

            deposit_tx_hashes.add(tx_hash)
            amount = asset_normalized_value(
                amount=hexstr_to_int(event['topics'][3]),
                asset=vault.collateral_asset,
            )
            timestamp = self.ethereum.get_event_timestamp(event)
            usd_price = query_usd_price_or_use_default(
                asset=vault.collateral_asset,
                time=timestamp,
                default_value=ZERO,
                location='vault collateral deposit',
            )
            vault_events.append(
                VaultEvent(
                    event_type=VaultEventType.DEPOSIT_COLLATERAL,
                    value=Balance(amount, amount * usd_price),
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                ))

        # Get the collateral withdrawal events
        argument_filters = {
            'sig': '0xef693bed',  # exit
            'usr': proxy,
        }
        events = self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        )
        for event in events:
            tx_hash = event['transactionHash']
            if tx_hash not in frob_event_tx_hashes:
                # If there is no corresponding frob event then skip
                continue
            amount = asset_normalized_value(
                amount=hexstr_to_int(event['topics'][3]),
                asset=vault.collateral_asset,
            )
            timestamp = self.ethereum.get_event_timestamp(event)
            usd_price = query_usd_price_or_use_default(
                asset=vault.collateral_asset,
                time=timestamp,
                default_value=ZERO,
                location='vault collateral withdrawal',
            )
            vault_events.append(
                VaultEvent(
                    event_type=VaultEventType.WITHDRAW_COLLATERAL,
                    value=Balance(amount, amount * usd_price),
                    timestamp=timestamp,
                    tx_hash=event['transactionHash'],
                ))

        total_dai_wei = 0
        # Get the dai generation events
        argument_filters = {
            'sig': '0xbb35783b',  # move
            'arg1': address_to_bytes32(urn),
            # For CDPs that were created by migrating from SAI the first DAI generation
            # during vault creation will have the old owner as arg2. So we can't
            # filter for it here. Still seems like the urn as arg1 is sufficient
            # 'arg2': address_to_bytes32(proxy),
        }
        events = self.ethereum.get_logs(
            contract_address=MAKERDAO_VAT.address,
            abi=MAKERDAO_VAT.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=MAKERDAO_VAT.deployed_block,
        )
        for event in events:
            given_amount = _shift_num_right_by(
                hexstr_to_int(event['topics'][3]), RAY_DIGITS)
            total_dai_wei += given_amount
            amount = token_normalized_value(
                token_amount=given_amount,
                token=A_DAI,
            )
            timestamp = self.ethereum.get_event_timestamp(event)
            usd_price = query_usd_price_or_use_default(
                asset=A_DAI,
                time=timestamp,
                default_value=FVal(1),
                location='vault debt generation',
            )
            vault_events.append(
                VaultEvent(
                    event_type=VaultEventType.GENERATE_DEBT,
                    value=Balance(amount, amount * usd_price),
                    timestamp=timestamp,
                    tx_hash=event['transactionHash'],
                ))

        # Get the dai payback events
        argument_filters = {
            'sig': '0x3b4da69f',  # join
            'usr': proxy,
            'arg1': address_to_bytes32(urn),
        }
        events = self.ethereum.get_logs(
            contract_address=MAKERDAO_DAI_JOIN.address,
            abi=MAKERDAO_DAI_JOIN.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=MAKERDAO_DAI_JOIN.deployed_block,
        )
        for event in events:
            given_amount = hexstr_to_int(event['topics'][3])
            total_dai_wei -= given_amount
            amount = token_normalized_value(
                token_amount=given_amount,
                token=A_DAI,
            )
            if amount == ZERO:
                # It seems there is a zero-value DAI transfer from the urn when
                # withdrawing ETH, so we ignore these events
                continue

            timestamp = self.ethereum.get_event_timestamp(event)
            usd_price = query_usd_price_or_use_default(
                asset=A_DAI,
                time=timestamp,
                default_value=FVal(1),
                location='vault debt payback',
            )

            vault_events.append(
                VaultEvent(
                    event_type=VaultEventType.PAYBACK_DEBT,
                    value=Balance(amount, amount * usd_price),
                    timestamp=timestamp,
                    tx_hash=event['transactionHash'],
                ))

        # Get the liquidation events
        argument_filters = {'urn': urn}
        events = self.ethereum.get_logs(
            contract_address=MAKERDAO_CAT.address,
            abi=MAKERDAO_CAT.abi,
            event_name='Bite',
            argument_filters=argument_filters,
            from_block=MAKERDAO_CAT.deployed_block,
        )
        sum_liquidation_amount = ZERO
        sum_liquidation_usd = ZERO
        for event in events:
            if isinstance(event['data'], str):
                lot = event['data'][:66]
            else:  # bytes
                lot = event['data'][:32]
            amount = asset_normalized_value(
                amount=hexstr_to_int(lot),
                asset=vault.collateral_asset,
            )
            timestamp = self.ethereum.get_event_timestamp(event)
            sum_liquidation_amount += amount
            usd_price = query_usd_price_or_use_default(
                asset=vault.collateral_asset,
                time=timestamp,
                default_value=ZERO,
                location='vault collateral liquidation',
            )
            amount_usd_value = amount * usd_price
            sum_liquidation_usd += amount_usd_value
            vault_events.append(
                VaultEvent(
                    event_type=VaultEventType.LIQUIDATION,
                    value=Balance(amount, amount_usd_value),
                    timestamp=timestamp,
                    tx_hash=event['transactionHash'],
                ))

        total_interest_owed = vault.debt.amount - token_normalized_value(
            token_amount=total_dai_wei,
            token=A_DAI,
        )
        # sort vault events by timestamp
        vault_events.sort(key=lambda event: event.timestamp)

        return MakerDAOVaultDetails(
            identifier=vault.identifier,
            collateral_asset=vault.collateral_asset,
            total_interest_owed=total_interest_owed,
            creation_ts=creation_ts,
            total_liquidated=Balance(sum_liquidation_amount,
                                     sum_liquidation_usd),
            events=vault_events,
        )
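As a quick worked example of the interest computation near the end (all figures hypothetical): if the vault generated 1000 DAI, paid back 400 DAI and currently owes 620 DAI, then:

from decimal import Decimal

total_dai_wei = (1000 - 400) * 10**18               # net principal still drawn, in wei
net_principal = Decimal(total_dai_wei) / 10**18     # Decimal('600')
total_interest_owed = Decimal(620) - net_principal  # Decimal('20') of accrued stability fees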
Example #13
    def _get_vault_withdraw_events(
        self,
        vault: YearnVault,
        address: ChecksumEthAddress,
        from_block: int,
        to_block: int,
    ) -> List[YearnVaultEvent]:
        """Get all withdraw events of the underlying token to the vault"""
        events: List[YearnVaultEvent] = []
        argument_filters = {'from': vault.contract.address, 'to': address}
        withdraw_events = self.ethereum.get_logs(
            contract_address=vault.underlying_token.ethereum_address,
            abi=ERC20TOKEN_ABI,
            event_name='Transfer',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=to_block,
        )
        for withdraw_event in withdraw_events:
            timestamp = self.ethereum.get_event_timestamp(withdraw_event)
            withdraw_amount = token_normalized_value(
                token_amount=hex_or_bytes_to_int(withdraw_event['data']),
                token=vault.token,
            )
            tx_hash = withdraw_event['transactionHash']
            tx_receipt = self.ethereum.get_transaction_receipt(tx_hash)
            withdraw_index = deserialize_int_from_hex_or_int(
                withdraw_event['logIndex'],
                'yearn withdraw log index',
            )
            burn_amount = None
            for log in tx_receipt['logs']:
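                # The hardcoded topic below is keccak256('Transfer(address,address,uint256)'),
                # i.e. the standard ERC-20 Transfer event signature; together with the zero
                # address as receiver it identifies the vault-share burn log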
                found_event = (
                    log['topics'][0] == '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' and  # noqa: E501
                    log['topics'][1] == address_to_bytes32(address) and
                    log['topics'][2] == address_to_bytes32(ZERO_ADDRESS)
                )
                if found_event:
                    # found the burn log
                    burn_amount = token_normalized_value(
                        token_amount=hex_or_bytes_to_int(log['data']),
                        token=vault.token,
                    )

            if burn_amount is None:
                self.msg_aggregator.add_error(
                    f'Ignoring yearn withdraw event with tx_hash {tx_hash} and log index '
                    f'{withdraw_index} due to inability to find corresponding burn event',
                )
                continue

            withdraw_usd_price = get_usd_price_zero_if_error(
                asset=vault.underlying_token,
                time=timestamp,
                location='yearn vault withdraw',
                msg_aggregator=self.msg_aggregator,
            )
            burn_usd_price = get_usd_price_zero_if_error(
                asset=vault.token,
                time=timestamp,
                location='yearn vault withdraw',
                msg_aggregator=self.msg_aggregator,
            )
            events.append(
                YearnVaultEvent(
                    event_type='withdraw',
                    block_number=deserialize_blocknumber(
                        withdraw_event['blockNumber']),
                    timestamp=timestamp,
                    from_asset=vault.token,
                    from_value=Balance(
                        amount=burn_amount,
                        usd_value=burn_amount * burn_usd_price,
                    ),
                    to_asset=vault.underlying_token,
                    to_value=Balance(
                        amount=withdraw_amount,
                        usd_value=withdraw_amount * withdraw_usd_price,
                    ),
                    realized_pnl=None,
                    tx_hash=tx_hash,
                    log_index=withdraw_index,
                ))

        return events
Example #14
    def _process_event(
        self,
        events: List[Dict[str, Any]],
        event_type: Literal['deposit', 'withdraw'],
    ) -> List[YearnVaultEvent]:
        result = []

        for entry in events:
            # The id returned is a composition of hash + '-' + log_index
            try:
                _, tx_hash, log_index, _ = entry['id'].split('-')
            except ValueError as e:
                log.debug(
                    f'Failed to extract transaction hash and log index from {event_type} event '
                    f'in yearn vaults v2 graph query. Got {entry["id"]}. {str(e)}.',
                )
                self.msg_aggregator.add_warning(
                    f'Ignoring {event_type} in yearn vault V2. Failed to read id {entry["id"]}',
                )
                continue

            try:
                if event_type == 'deposit':
                    from_asset = EthereumToken(entry['vault']['token']['id'])
                    to_asset = EthereumToken(
                        entry['vault']['shareToken']['id'])
                elif event_type == 'withdraw':
                    from_asset = EthereumToken(
                        entry['vault']['shareToken']['id'])
                    to_asset = EthereumToken(entry['vault']['token']['id'])
            except UnknownAsset:
                if event_type == 'deposit':
                    from_str = entry['vault']['token']['symbol']
                    to_str = entry['vault']['shareToken']['symbol']
                elif event_type == 'withdraw':
                    from_str = entry['vault']['shareToken']['symbol']
                    to_str = entry['vault']['token']['symbol']
                self.msg_aggregator.add_warning(
                    f'Ignoring {event_type} in yearn vaults V2 from {from_str} to '
                    f'{to_str} because the token is not recognized.', )
                continue
            except KeyError as e:
                log.debug(
                    f'Failed to extract token information from {event_type} event '
                    f'in yearn vaults v2 graph query. {str(e)}.', )
                self.msg_aggregator.add_warning(
                    f'Ignoring {event_type} {tx_hash} in yearn vault V2. Failed to decode'
                    f' remote information.', )
                continue

            try:
                from_asset_usd_price = get_usd_price_zero_if_error(
                    asset=from_asset,
                    time=Timestamp(int(entry['timestamp']) // 1000),
                    location=f'yearn vault v2 deposit {tx_hash}',
                    msg_aggregator=self.msg_aggregator,
                )
                to_asset_usd_price = get_usd_price_zero_if_error(
                    asset=to_asset,
                    time=Timestamp(int(entry['timestamp']) // 1000),
                    location=f'yearn v2 vault deposit {tx_hash}',
                    msg_aggregator=self.msg_aggregator,
                )
                if event_type == 'deposit':
                    from_asset_amount = token_normalized_value(
                        token_amount=int(entry['tokenAmount']),
                        token=from_asset,
                    )
                    to_asset_amount = token_normalized_value(
                        token_amount=int(entry['sharesMinted']),
                        token=to_asset,
                    )
                elif event_type == 'withdraw':
                    from_asset_amount = token_normalized_value(
                        token_amount=int(entry['sharesBurnt']),
                        token=from_asset,
                    )
                    to_asset_amount = token_normalized_value(
                        token_amount=int(entry['tokenAmount']),
                        token=to_asset,
                    )
                result.append(
                    YearnVaultEvent(
                        event_type=event_type,
                        block_number=int(entry['blockNumber']),
                        timestamp=Timestamp(int(entry['timestamp']) // 1000),
                        from_asset=from_asset,
                        from_value=Balance(
                            amount=from_asset_amount,
                            usd_value=from_asset_amount * from_asset_usd_price,
                        ),
                        to_asset=to_asset,
                        to_value=Balance(
                            amount=to_asset_amount,
                            usd_value=to_asset_amount * to_asset_usd_price,
                        ),
                        realized_pnl=None,
                        tx_hash=tx_hash,
                        log_index=int(log_index),
                        version=2,
                    ))
            except (KeyError, ValueError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                log.error(
                    f'Failed to read {event_type} from yearn vaults v2 graph because the response'
                    f' does not have the expected output.',
                    error=msg,
                )
                self.msg_aggregator.add_warning(
                    f'Ignoring {event_type} {tx_hash} in yearn vault V2 from {from_asset} to '
                    f'{to_asset} because the remote information is not correct.',
                )
                continue
        return result
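For orientation, a deposit entry consumed by this loop needs at least the following shape. The field names are taken from the accesses above; all values are hypothetical, including the exact layout of the 'id' string:

entry = {
    'id': 'deposit-0xabc123-17-0',          # split on '-' above to recover the tx hash and log index
    'timestamp': '1650000000000',           # milliseconds, hence the // 1000 above
    'blockNumber': '14600000',
    'tokenAmount': '5000000000000000000',   # raw underlying token amount
    'sharesMinted': '4900000000000000000',  # raw vault shares received ('sharesBurnt' for withdrawals)
    'vault': {
        'token': {'id': '0x6B175474E89094C44Da98b954EedeAC495271d0F', 'symbol': 'DAI'},
        'shareToken': {'id': '0x0000000000000000000000000000000000000001', 'symbol': 'yvDAI'},  # hypothetical address
    },
}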
Example #15
    def decode_pot_for_dsr(
        self,
        tx_log: EthereumTxReceiptLog,
        transaction: EthereumTransaction,  # pylint: disable=unused-argument
        decoded_events: List[HistoryBaseEntry],
        all_logs: List[EthereumTxReceiptLog],
        action_items: List[ActionItem],  # pylint: disable=unused-argument
    ) -> Tuple[Optional[HistoryBaseEntry], Optional[ActionItem]]:
        if tx_log.topics[0] == POT_JOIN:
            potjoin_user_address = hex_or_bytes_to_address(tx_log.topics[1])
            user = self._get_address_or_proxy(potjoin_user_address)
            if user is None:
                return None, None

            # Now we need to find the DAI join event to get the actual DAI value
            daijoin_log = None
            for event_log in all_logs:
                if event_log.address == MAKERDAO_DAI_JOIN.address and event_log.topics[0] == GENERIC_JOIN:  # noqa: E501
                    daijoin_user_address = hex_or_bytes_to_address(event_log.topics[1])
                    if daijoin_user_address != potjoin_user_address:
                        continue  # not a match

                    daijoin_log = event_log
                    break

            if daijoin_log is None:
                return None, None  # no matching daijoin for potjoin

            raw_amount = hex_or_bytes_to_int(daijoin_log.topics[3])
            amount = token_normalized_value(
                token_amount=raw_amount,
                token=A_DAI,
            )

            # The transfer event should be right before
            for event in decoded_events:
                if event.asset == A_DAI and event.event_type == HistoryEventType.SPEND and event.balance.amount == amount:  # noqa: E501
                    # found the event
                    event.location_label = user
                    event.counterparty = CPT_DSR
                    event.event_type = HistoryEventType.DEPOSIT
                    event.event_subtype = HistoryEventSubType.DEPOSIT_ASSET
                    event.notes = f'Deposit {amount} DAI in the DSR'
                    return None, None

        elif tx_log.topics[0] == POT_EXIT:
            pot_exit_address = hex_or_bytes_to_address(tx_log.topics[1])
            user = self._get_address_or_proxy(pot_exit_address)
            if user is None:
                return None, None

            # Now we need to find the DAI exit event to get the actual DAI value
            daiexit_log = None
            for event_log in all_logs:
                if event_log.address == MAKERDAO_DAI_JOIN.address and event_log.topics[0] == GENERIC_EXIT:  # noqa: E501
                    daiexit_user_address = hex_or_bytes_to_address(event_log.topics[2])
                    if daiexit_user_address != user:
                        continue  # not a match

                    daiexit_log = event_log
                    break

            if daiexit_log is None:
                return None, None  # no matching daiexit for potexit

            raw_amount = hex_or_bytes_to_int(daiexit_log.topics[3])
            amount = token_normalized_value(
                token_amount=raw_amount,
                token=A_DAI,
            )
            # The transfer event will be in a subsequent log
            action_item = ActionItem(
                action='transform',
                sequence_index=tx_log.log_index,
                from_event_type=HistoryEventType.RECEIVE,
                from_event_subtype=HistoryEventSubType.NONE,
                asset=A_DAI,
                amount=amount,
                to_event_type=HistoryEventType.WITHDRAWAL,
                to_event_subtype=HistoryEventSubType.REMOVE_ASSET,
                to_notes=f'Withdraw {amount} DAI from the DSR',
                to_counterparty=CPT_DSR,
            )
            return None, action_item

        return None, None
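Several of the decoders above (examples #4 and #9, and the POT_EXIT branch here) return an ActionItem with action='transform' instead of editing decoded_events directly, because the matching transfer has not been decoded yet at that point. A minimal sketch of how such an item could be applied to a later event, based only on the fields set in the constructor calls above (the matching logic is an assumption, not rotki's actual implementation):

def apply_transform_action(action_item, event) -> bool:
    """Rewrite a later-decoded event in place if it matches the action item."""
    matches = (
        event.event_type == action_item.from_event_type and
        event.event_subtype == action_item.from_event_subtype and
        event.asset == action_item.asset and
        event.balance.amount == action_item.amount
    )
    if not matches:
        return False
    if action_item.to_event_type is not None:
        event.event_type = action_item.to_event_type
    if action_item.to_event_subtype is not None:
        event.event_subtype = action_item.to_event_subtype
    if action_item.to_counterparty is not None:
        event.counterparty = action_item.to_counterparty
    if action_item.to_notes is not None:
        event.notes = action_item.to_notes
    return True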