Example No. 1
def get_usd_price_zero_if_error(
    asset: Asset,
    time: Timestamp,
    location: str,
    msg_aggregator: MessagesAggregator,
) -> Price:
    """A special version of query_usd_price_zero_if_error using current price instead
    of historical token price for some assets.

    Since these assets are not supported by our price oracles we derive the current
    price from the chain, but without an archive node we can't query old prices.

    TODO: Make an issue about this
    This can be solved when we have an archive node.
    """
    inquirer = Inquirer()
    if asset in inquirer.special_tokens:
        return inquirer.find_usd_price(asset)

    return query_usd_price_zero_if_error(
        asset=asset,
        time=time,
        location=location,
        msg_aggregator=msg_aggregator,
    )
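For context, a minimal sketch of what the wrapped query_usd_price_zero_if_error helper presumably does: ask the price historian for the historical USD price and swallow lookup failures, returning a zero price instead of raising. The PriceHistorian call and the exception names below are assumptions for illustration, not the verbatim rotki implementation.

def query_usd_price_zero_if_error(
    asset: Asset,
    time: Timestamp,
    location: str,
    msg_aggregator: MessagesAggregator,
) -> Price:
    """Sketch only: return a zero price instead of raising when the oracle fails"""
    try:
        return PriceHistorian().query_historical_price(
            from_asset=asset,
            to_asset=A_USD,  # assumed constant for the USD fiat asset
            timestamp=time,
        )
    except (RemoteError, NoPriceForGivenTimestamp):  # assumed failure modes
        msg_aggregator.add_error(
            f'Could not query USD price of {asset.identifier} at {time} '
            f'while processing {location}. Assuming zero price.',
        )
        return Price(ZERO)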
Example No. 2
def _get_eth2_staking_deposits_onchain(
    ethereum: 'EthereumManager',
    addresses: List[ChecksumEthAddress],
    has_premium: bool,
    msg_aggregator: MessagesAggregator,
    from_ts: Timestamp,
    to_ts: Timestamp,
) -> List[Eth2Deposit]:
    events = ETH2_DEPOSIT.get_logs(
        ethereum=ethereum,
        event_name='DepositEvent',
        argument_filters={},
        from_block=ETH2_DEPOSIT.deployed_block,
        to_block='latest',
    )
    transactions = ethereum.transactions.query(
        addresses=addresses,
        from_ts=from_ts,
        to_ts=to_ts,
        with_limit=False,
        recent_first=False,
    )
    deposits: List[Eth2Deposit] = []
    for transaction in transactions:
        if transaction.to_address != ETH2_DEPOSIT.address:
            continue

        tx_hash = '0x' + transaction.tx_hash.hex()
        for event in events:
            # Now find the corresponding event. If no event is found the transaction
            # probably failed or was something other than a deposit
            if event['transactionHash'] == tx_hash:
                decoded_data = decode_event_data(event['data'], EVENT_ABI)
                amount = int.from_bytes(decoded_data[2], byteorder='little')
                usd_price = ZERO
                if has_premium:  # won't show this to non-premium so don't bother
                    usd_price = query_usd_price_zero_if_error(
                        asset=A_ETH,
                        time=transaction.timestamp,
                        location='Eth2 staking query',
                        msg_aggregator=msg_aggregator,
                    )
                normalized_amount = from_gwei(FVal(amount))
                deposits.append(
                    Eth2Deposit(
                        from_address=transaction.from_address,
                        pubkey='0x' + decoded_data[0].hex(),
                        withdrawal_credentials='0x' + decoded_data[1].hex(),
                        value=Balance(normalized_amount,
                                      usd_price * normalized_amount),
                        validator_index=int.from_bytes(decoded_data[4],
                                                       byteorder='little'),
                        tx_hash=tx_hash,
                        log_index=event['logIndex'],
                        timestamp=Timestamp(transaction.timestamp),
                    ))
                break

    return deposits
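The decoded_data indices above follow the deposit contract's DepositEvent(pubkey, withdrawal_credentials, amount, signature, index) layout, where amount and index are little-endian byte strings denominated in gwei. A tiny stdlib-only illustration of that decoding, with from_gwei assumed to divide by 10**9:

# 32 ETH expressed in gwei, encoded the way the deposit contract emits it
amount_bytes = (32 * 10**9).to_bytes(8, byteorder='little')
assert int.from_bytes(amount_bytes, byteorder='little') == 32_000_000_000
# from_gwei(FVal(32_000_000_000)) is then assumed to normalize this to 32 ETH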
Example No. 3
    def get_validator_deposits(
        self,
        indices_or_pubkeys: Union[List[int], List[Eth2PubKey]],
    ) -> List[Eth2Deposit]:
        """Get the deposits of all the validators given from the list of indices or pubkeys

        Queries in chunks of 100 due to api limitations

        May raise:
        - RemoteError due to problems querying beaconcha.in API
        """
        chunks = _calculate_query_chunks(indices_or_pubkeys)
        data = []
        for chunk in chunks:
            result = self._query(
                module='validator',
                endpoint='deposits',
                encoded_args=','.join(str(x) for x in chunk),
            )
            if isinstance(result, list):
                data.extend(result)
            else:
                data.append(result)

        deposits = []
        for entry in data:
            try:
                amount = from_gwei(FVal(entry['amount']))
                timestamp = entry['block_ts']
                usd_price = query_usd_price_zero_if_error(
                    asset=A_ETH,
                    time=timestamp,
                    location=f'Eth2 staking deposit at time {timestamp}',
                    msg_aggregator=self.msg_aggregator,
                )
                deposits.append(
                    Eth2Deposit(
                        from_address=deserialize_ethereum_address(
                            entry['from_address']),
                        pubkey=entry['publickey'],
                        withdrawal_credentials=entry['withdrawal_credentials'],
                        value=Balance(
                            amount=amount,
                            usd_value=amount * usd_price,
                        ),
                        tx_hash=hexstring_to_bytes(entry['tx_hash']),
                        tx_index=entry['tx_index'],
                        timestamp=entry['block_ts'],
                    ))
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                raise RemoteError(
                    f'Beaconcha.in deposits response processing error. {msg}',
                ) from e

        return deposits
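The docstring notes that queries go out in chunks of 100 because of API limitations. A plausible sketch of _calculate_query_chunks under that assumption; the real rotki helper may differ in signature and edge-case handling.

from typing import List, Sequence

def _calculate_query_chunks(
    indices_or_pubkeys: Sequence,
    chunk_size: int = 100,  # assumed beaconcha.in per-request limit
) -> List[Sequence]:
    """Split the validator arguments into chunks the API will accept"""
    return [
        indices_or_pubkeys[i:i + chunk_size]
        for i in range(0, len(indices_or_pubkeys), chunk_size)
    ]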
Example No. 4
    def _get_comp_events(
        self,
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[CompoundEvent]:
        self.ethereum.get_blocknumber_by_time(from_ts)
        from_block = max(
            COMP_DEPLOYED_BLOCK,
            self.ethereum.get_blocknumber_by_time(from_ts),
        )
        argument_filters = {
            'from': COMPTROLLER_PROXY.address,
            'to': address,
        }
        comp_events = self.ethereum.get_logs(
            contract_address=A_COMP.ethereum_address,
            abi=ERC20TOKEN_ABI,
            event_name='Transfer',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=self.ethereum.get_blocknumber_by_time(to_ts),
        )

        events = []
        for event in comp_events:
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            amount = token_normalized_value(hexstr_to_int(event['data']),
                                            A_COMP)
            usd_price = query_usd_price_zero_if_error(
                asset=A_COMP,
                time=timestamp,
                location=f'comp_claim {tx_hash}',
                msg_aggregator=self.msg_aggregator,
            )
            value = Balance(amount, amount * usd_price)
            events.append(
                CompoundEvent(
                    event_type='comp',
                    address=address,
                    block_number=event['blockNumber'],
                    timestamp=timestamp,
                    asset=A_COMP,
                    value=value,
                    to_asset=None,
                    to_value=None,
                    realized_pnl=value,
                    tx_hash=tx_hash,
                    log_index=event['logIndex'],
                ))

        return events
Example No. 5
File: graph.py Project: rotki/rotki
    def _parse_repays(
        self,
        repays: List[Dict[str, Any]],
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[AaveRepayEvent]:
        events = []
        for entry in repays:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = _get_reserve_asset_and_decimals(entry,
                                                     reserve_key='reserve')
            if result is None:
                continue  # problem parsing, error already logged
            asset, decimals = result
            if 'amountAfterFee' in entry:
                amount_after_fee = token_normalized_value_decimals(
                    int(entry['amountAfterFee']),
                    token_decimals=decimals,
                )
                fee = token_normalized_value_decimals(int(entry['fee']),
                                                      token_decimals=decimals)
            else:
                # In the V2 subgraph the amountAfterFee and Fee keys are replaced by amount
                amount_after_fee = token_normalized_value_decimals(
                    int(entry['amount']),
                    token_decimals=decimals,
                )
                fee = ZERO
            usd_price = query_usd_price_zero_if_error(
                asset=asset,
                time=timestamp,
                location=f'aave repay event {tx_hash} from graph query',
                msg_aggregator=self.msg_aggregator,
            )
            events.append(
                AaveRepayEvent(
                    event_type='repay',
                    asset=asset,
                    value=Balance(amount=amount_after_fee,
                                  usd_value=amount_after_fee * usd_price),
                    fee=Balance(amount=fee, usd_value=fee * usd_price),
                    block_number=0,  # can't get from graph query
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                    log_index=index,  # not really the log index, but should also be unique
                ))

        return events
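Both branches above rely on token_normalized_value_decimals to turn the raw integer amounts reported by the subgraph into token units. A minimal stdlib sketch of the assumed behaviour (rotki uses its own FVal type rather than Decimal), plus a worked number:

from decimal import Decimal

def token_normalized_value_decimals(token_amount: int, token_decimals: int) -> Decimal:
    # assumed: divide the raw on-chain integer by 10 ** decimals
    return Decimal(token_amount) / (Decimal(10) ** token_decimals)

# e.g. a DAI repay (18 decimals) of 1_500_000_000_000_000_000 raw units is 1.5 DAI
assert token_normalized_value_decimals(1_500_000_000_000_000_000, 18) == Decimal('1.5')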
Example No. 6
 def _get_asset_and_balance(
     self,
     entry: Dict[str, Any],
     timestamp: Timestamp,
     reserve_key: str,
     amount_key: str,
     location: str,
 ) -> Optional[Tuple[Asset, Balance]]:
     """Utility function to parse asset from graph query amount and price and return balance"""
     result = _get_reserve_asset_and_decimals(entry, reserve_key)
     if result is None:
         return None
     asset, decimals = result
     amount = token_normalized_value_decimals(
         token_amount=int(entry[amount_key]),
         token_decimals=decimals,
     )
     usd_price = query_usd_price_zero_if_error(
         asset=asset,
         time=timestamp,
         location=location,
         msg_aggregator=self.msg_aggregator,
     )
     return asset, Balance(amount=amount, usd_value=amount * usd_price)
Example No. 7
    def _process_events(
        self,
        events: List[CompoundEvent],
        given_defi_balances: GIVEN_DEFI_BALANCES,
    ) -> Tuple[ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS,
               ADDRESS_TO_ASSETS]:
        """Processes all events and returns a dictionary of earned balances totals"""
        assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
        loss_assets: ADDRESS_TO_ASSETS = defaultdict(
            lambda: defaultdict(Balance))
        rewards_assets: ADDRESS_TO_ASSETS = defaultdict(
            lambda: defaultdict(Balance))

        profit_so_far: ADDRESS_TO_ASSETS = defaultdict(
            lambda: defaultdict(Balance))
        loss_so_far: ADDRESS_TO_ASSETS = defaultdict(
            lambda: defaultdict(Balance))
        liquidation_profit: ADDRESS_TO_ASSETS = defaultdict(
            lambda: defaultdict(Balance))

        balances = self.get_balances(given_defi_balances)

        for idx, event in enumerate(events):
            if event.event_type == 'mint':
                assets[event.address][event.asset] -= event.value
            elif event.event_type == 'redeem':
                assert event.to_asset, 'redeem events should have a to_asset'
                assert event.to_value, 'redeem events should have a to_value'
                profit_amount = (
                    assets[event.address][event.to_asset].amount +
                    event.to_value.amount -
                    profit_so_far[event.address][event.to_asset].amount)
                profit: Optional[Balance]
                if profit_amount >= 0:
                    usd_price = query_usd_price_zero_if_error(
                        asset=event.to_asset,
                        time=event.timestamp,
                        location='comp redeem event processing',
                        msg_aggregator=self.msg_aggregator,
                    )
                    profit = Balance(profit_amount, profit_amount * usd_price)
                    profit_so_far[event.address][event.to_asset] += profit
                else:
                    profit = None

                assets[event.address][event.to_asset] += event.to_value
                events[idx] = event._replace(
                    realized_pnl=profit)  # TODO: maybe not named tuple?

            elif event.event_type == 'borrow':
                loss_assets[event.address][event.asset] -= event.value
            elif event.event_type == 'repay':
                loss_amount = (loss_assets[event.address][event.asset].amount +
                               event.value.amount -
                               loss_so_far[event.address][event.asset].amount)
                loss: Optional[Balance]
                if loss_amount >= 0:
                    usd_price = query_usd_price_zero_if_error(
                        asset=event.asset,
                        time=event.timestamp,
                        location='comp repay event processing',
                        msg_aggregator=self.msg_aggregator,
                    )
                    loss = Balance(loss_amount, loss_amount * usd_price)
                    loss_so_far[event.address][event.asset] += loss
                else:
                    loss = None

                loss_assets[event.address][event.asset] += event.value
                events[idx] = event._replace(
                    realized_pnl=loss)  # TODO: maybe not named tuple?
            elif event.event_type == 'liquidation':
                assert event.to_asset, 'liquidation events should have a to_asset'
                # Liquidator covers part of the borrowed amount
                loss_assets[event.address][event.asset] += event.value
                liquidation_profit[event.address][event.asset] += event.value
                # Liquidator receives discounted to_asset
                loss_assets[event.address][event.to_asset] += event.to_value
                loss_so_far[event.address][event.to_asset] += event.to_value
            elif event.event_type == 'comp':
                rewards_assets[event.address][A_COMP] += event.value

        for address, bentry in balances.items():
            for asset, entry in bentry['lending'].items():
                profit_amount = (profit_so_far[address][asset].amount +
                                 entry.balance.amount +
                                 assets[address][asset].amount)
                if profit_amount < 0:
                    log.error(
                        f'In compound we calculated negative profit. Should not happen. '
                        f'address: {address} asset: {asset} ', )
                else:
                    usd_price = Inquirer().find_usd_price(Asset(asset))
                    profit_so_far[address][asset] = Balance(
                        amount=profit_amount,
                        usd_value=profit_amount * usd_price,
                    )

            for asset, entry in bentry['borrowing'].items():
                remaining = entry.balance + loss_assets[address][asset]
                if remaining.amount < ZERO:
                    continue
                loss_so_far[address][asset] += remaining
                if loss_so_far[address][asset].usd_value < ZERO:
                    amount = loss_so_far[address][asset].amount
                    loss_so_far[address][asset] = Balance(
                        amount=amount,
                        usd_value=amount *
                        Inquirer().find_usd_price(Asset(asset)),
                    )

            for asset, entry in bentry['rewards'].items():
                rewards_assets[address][asset] += entry.balance

        return profit_so_far, loss_so_far, liquidation_profit, rewards_assets
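A worked example of the redeem branch above, with illustrative numbers, shows how realized_pnl is derived:

# mint: the user deposits 100 DAI          -> assets[addr][DAI] = -100
# redeem: the user gets back 105 DAI       -> profit_amount = -100 + 105 - 0 = 5
# those 5 DAI of interest become the event's realized_pnl and profit_so_far[addr][DAI] += 5;
# assets[addr][DAI] then moves to +5, so a later redeem only counts interest earned after this one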
Example No. 8
    def _get_lend_events(
        self,
        event_type: Literal['mint', 'redeem'],
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[CompoundEvent]:
        param_types, param_values = get_common_params(from_ts, to_ts, address)
        if event_type == 'mint':
            graph_event_name = 'mintEvents'
            addr_position = 'to'
        elif event_type == 'redeem':
            graph_event_name = 'redeemEvents'
            addr_position = 'from'

        result = self.graph.query(  # type: ignore
            querystr=LEND_EVENTS_QUERY_PREFIX.format(
                graph_event_name=graph_event_name,
                addr_position=addr_position,
            ),
            param_types=param_types,
            param_values=param_values,
        )

        events = []
        for entry in result[graph_event_name]:
            ctoken_symbol = entry['cTokenSymbol']
            try:
                ctoken_asset = Asset(ctoken_symbol)
            except UnknownAsset:
                log.error(
                    f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.'
                )
                continue

            underlying_symbol = ctoken_symbol[1:]
            try:
                underlying_asset = Asset(underlying_symbol)
            except UnknownAsset:
                log.error(
                    f'Found unexpected token symbol {underlying_symbol} during '
                    f'graph query. Skipping.', )
                continue
            timestamp = entry['blockTime']
            usd_price = query_usd_price_zero_if_error(
                asset=underlying_asset,
                time=timestamp,
                location=f'compound {event_type}',
                msg_aggregator=self.msg_aggregator,
            )
            underlying_amount = FVal(entry['underlyingAmount'])
            usd_value = underlying_amount * usd_price
            parse_result = _get_txhash_and_logidx(entry['id'])
            if parse_result is None:
                log.error(
                    f'Found unprocessable mint id from the graph {entry["id"]}. Skipping'
                )
                continue
            amount = FVal(entry['amount'])

            if event_type == 'mint':
                from_value = Balance(amount=underlying_amount,
                                     usd_value=usd_value)
                to_value = Balance(amount=amount, usd_value=usd_value)
                from_asset = underlying_asset
                to_asset = ctoken_asset
            else:  # redeem
                from_value = Balance(amount=amount, usd_value=usd_value)
                to_value = Balance(amount=underlying_amount,
                                   usd_value=usd_value)
                from_asset = ctoken_asset
                to_asset = underlying_asset

            events.append(
                CompoundEvent(
                    event_type=event_type,
                    address=address,
                    block_number=entry['blockNumber'],
                    timestamp=timestamp,
                    asset=from_asset,
                    value=from_value,
                    to_asset=to_asset,
                    to_value=to_value,
                    realized_pnl=None,
                    tx_hash=parse_result[0],
                    log_index=parse_result[1],
                ))

        return events
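The subgraph event ids used above presumably encode '<tx_hash>-<log_index>', which is what _get_txhash_and_logidx appears to split apart. A hedged sketch of such a parser; the real implementation may validate more strictly:

from typing import Optional, Tuple

def _get_txhash_and_logidx(identifier: str) -> Optional[Tuple[str, int]]:
    # assumed Compound subgraph id format: '<tx_hash>-<log_index>'
    parts = identifier.split('-')
    if len(parts) != 2 or parts[0] == '':
        return None
    try:
        return parts[0], int(parts[1])
    except ValueError:
        return None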
Example No. 9
    def _get_liquidation_events(
        self,
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[CompoundEvent]:
        """https://compound.finance/docs/ctokens#liquidate-borrow"""
        param_types, param_values = get_common_params(from_ts, to_ts, address)
        result = self.graph.query(  # type: ignore
            querystr=
            """liquidationEvents (where: {blockTime_lte: $end_ts, blockTime_gte: $start_ts, from: $address}) {
    id
    amount
    from
    blockNumber
    blockTime
    cTokenSymbol
    underlyingSymbol
    underlyingRepayAmount
}}""",
            param_types=param_types,
            param_values=param_values,
        )

        events = []
        for entry in result['liquidationEvents']:
            ctoken_symbol = entry['cTokenSymbol']
            try:
                ctoken_asset = Asset(ctoken_symbol)
            except UnknownAsset:
                log.error(
                    f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.'
                )
                continue
            underlying_symbol = entry['underlyingSymbol']
            try:
                underlying_asset = Asset(underlying_symbol)
            except UnknownAsset:
                log.error(
                    f'Found unexpected token symbol {underlying_symbol} during '
                    f'graph query. Skipping.', )
                continue
            timestamp = entry['blockTime']
            # Amount/value of underlying asset paid by liquidator
            # Essentially liquidator covers part of the debt of the user
            debt_amount = FVal(entry['underlyingRepayAmount'])
            underlying_usd_price = query_usd_price_zero_if_error(
                asset=underlying_asset,
                time=timestamp,
                location='compound liquidation underlying asset',
                msg_aggregator=self.msg_aggregator,
            )
            debt_usd_value = debt_amount * underlying_usd_price
            # Amount/value of ctoken_asset lost to the liquidator
            # This is what the liquidator gains at a discount
            liquidated_amount = FVal(entry['amount'])
            liquidated_usd_price = query_usd_price_zero_if_error(
                asset=ctoken_asset,
                time=timestamp,
                location='compound liquidation ctoken asset',
                msg_aggregator=self.msg_aggregator,
            )
            liquidated_usd_value = liquidated_amount * liquidated_usd_price
            parse_result = _get_txhash_and_logidx(entry['id'])
            if parse_result is None:
                log.error(
                    f'Found unprocessable liquidation id from the graph {entry["id"]}. Skipping',
                )
                continue

            gained_value = Balance(amount=debt_amount,
                                   usd_value=debt_usd_value)
            lost_value = Balance(amount=liquidated_amount,
                                 usd_value=liquidated_usd_value)
            events.append(
                CompoundEvent(
                    event_type='liquidation',
                    address=address,
                    block_number=entry['blockNumber'],
                    timestamp=timestamp,
                    asset=underlying_asset,
                    value=gained_value,
                    to_asset=ctoken_asset,
                    to_value=lost_value,
                    realized_pnl=None,
                    tx_hash=parse_result[0],
                    log_index=parse_result[1],
                ))

        return events
Example No. 10
    def _get_borrow_events(
        self,
        event_type: Literal['borrow', 'repay'],
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[CompoundEvent]:
        param_types, param_values = get_common_params(from_ts, to_ts, address)
        if event_type == 'borrow':
            graph_event_name = 'borrowEvents'
            payer_or_empty = ''
        elif event_type == 'repay':
            graph_event_name = 'repayEvents'
            payer_or_empty = 'payer'

        result = self.graph.query(  # type: ignore
            querystr=BORROW_EVENTS_QUERY_PREFIX.format(
                graph_event_name=graph_event_name,
                payer_or_empty=payer_or_empty,
            ),
            param_types=param_types,
            param_values=param_values,
        )

        events = []
        for entry in result[graph_event_name]:
            underlying_symbol = entry['underlyingSymbol']
            try:
                underlying_asset = Asset(underlying_symbol)
            except UnknownAsset:
                log.error(
                    f'Found unexpected token symbol {underlying_symbol} during '
                    f'graph query. Skipping.', )
                continue
            timestamp = entry['blockTime']
            usd_price = query_usd_price_zero_if_error(
                asset=underlying_asset,
                time=timestamp,
                location=f'compound {event_type}',
                msg_aggregator=self.msg_aggregator,
            )
            amount = FVal(entry['amount'])
            parse_result = _get_txhash_and_logidx(entry['id'])
            if parse_result is None:
                log.error(
                    f'Found unprocessable borrow/repay id from the graph {entry["id"]}. Skipping',
                )
                continue

            events.append(
                CompoundEvent(
                    event_type=event_type,
                    address=address,
                    block_number=entry['blockNumber'],
                    timestamp=timestamp,
                    asset=underlying_asset,
                    value=Balance(amount=amount, usd_value=amount * usd_price),
                    to_asset=None,
                    to_value=None,
                    realized_pnl=None,
                    tx_hash=parse_result[0],
                    log_index=parse_result[1],
                ))

        return events
Example No. 11
    def _calculate_interest_and_profit(
        self,
        user_address: ChecksumEthAddress,
        user_result: Dict[str, Any],
        actions: List[AaveDepositWithdrawalEvent],
        balances: AaveBalances,
        db_interest_events: Set[AaveInterestEvent],
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> Tuple[List[AaveInterestEvent], Dict[Asset, Balance]]:
        reserve_history = {}
        for reserve in user_result['reserves']:
            pairs = reserve['id'].split('0x')
            if len(pairs) != 4:
                log.error(
                    f'Expected to find 3 addresses in graph\'s reserve history id '
                    f'but the encountered id does not match: {reserve["id"]}. Skipping entry...',
                )
                continue

            try:
                address_s = '0x' + pairs[2]
                reserve_address = deserialize_ethereum_address(address_s)
            except DeserializationError:
                log.error(
                    f'Failed to deserialize reserve address {address_s} '
                    f'Skipping reserve address {address_s} for user address {user_address}',
                )
                continue

            atoken_history = _parse_atoken_balance_history(
                history=reserve['aTokenBalanceHistory'],
                from_ts=from_ts,
                to_ts=to_ts,
            )
            reserve_history[reserve_address] = atoken_history

        interest_events: List[AaveInterestEvent] = []
        atoken_balances: Dict[Asset, FVal] = defaultdict(FVal)
        used_history_indices = set()
        total_earned: Dict[Asset, Balance] = defaultdict(Balance)

        # Go through the existing db interest events and add total earned
        for interest_event in db_interest_events:
            total_earned[interest_event.asset] += interest_event.value

        # Create all new interest events in the query
        actions.sort(key=lambda event: event.timestamp)
        for action in actions:
            if action.event_type == 'deposit':
                atoken_balances[action.asset] += action.value.amount
            else:  # withdrawal
                atoken_balances[action.asset] -= action.value.amount

            action_reserve_address = asset_to_aave_reserve(action.asset)
            if action_reserve_address is None:
                log.error(
                    f'Could not find aave reserve address for asset '
                    f'{action.asset} in an aave graph response.'
                    f' Skipping entry...', )
                continue
            history = reserve_history.get(action_reserve_address, None)
            if history is None:
                log.error(
                    f'Could not find aTokenBalanceHistory for reserve '
                    f'{action_reserve_address} in an aave graph response.'
                    f' Skipping entry...', )
                continue
            history.sort(key=lambda event: event.timestamp)

            for idx, entry in enumerate(history):
                if idx in used_history_indices:
                    continue
                used_history_indices.add(idx)

                if entry.tx_hash == action.tx_hash:
                    diff = entry.balance - atoken_balances[action.asset]
                    if diff != ZERO:
                        atoken_balances[action.asset] = entry.balance
                        asset = ASSET_TO_ATOKENV1.get(action.asset, None)
                        if asset is None:
                            log.error(
                                f'Could not find corresponding aToken to '
                                f'{action.asset.identifier} during an aave graph query'
                                f' Skipping entry...', )
                            continue
                        timestamp = entry.timestamp
                        usd_price = query_usd_price_zero_if_error(
                            asset=asset,
                            time=timestamp,
                            location='aave interest event from graph query',
                            msg_aggregator=self.msg_aggregator,
                        )
                        earned_balance = Balance(amount=diff,
                                                 usd_value=diff * usd_price)
                        interest_event = AaveInterestEvent(
                            event_type='interest',
                            asset=asset,
                            value=earned_balance,
                            block_number=0,  # can't get from graph query
                            timestamp=timestamp,
                            tx_hash=entry.tx_hash,
                            # not really the log index, but should also be unique
                            log_index=action.log_index + 1,
                        )
                        if interest_event in db_interest_events:
                            # This should not really happen since we already query
                            # historical atoken balance history in the new range
                            log.warning(
                                f'During aave subgraph query interest and profit calculation '
                                f'tried to generate interest event {interest_event} that '
                                f'already existed in the DB ', )
                            continue

                        interest_events.append(interest_event)
                        total_earned[asset] += earned_balance

                    # and once done break off the loop
                    break

                # else this atoken history is not due to an action, so skip it.
                # It's probably due to a simple transfer
                atoken_balances[action.asset] = entry.balance
                if action.event_type == 'deposit':
                    atoken_balances[action.asset] += action.value.amount
                else:  # withdrawal
                    atoken_balances[action.asset] -= action.value.amount

        # Take aave unpaid interest into account
        for balance_asset, lending_balance in balances.lending.items():
            atoken = ASSET_TO_ATOKENV1.get(balance_asset, None)
            if atoken is None:
                log.error(
                    f'Could not find corresponding aToken to '
                    f'{balance_asset.identifier} during an aave graph unpaid interest '
                    f'query. Skipping entry...', )
                continue
            principal_balance = self.ethereum.call_contract(
                contract_address=atoken.ethereum_address,
                abi=ATOKEN_ABI,
                method_name='principalBalanceOf',
                arguments=[user_address],
            )
            unpaid_interest = lending_balance.balance.amount - (
                principal_balance /
                (FVal(10)**FVal(atoken.decimals)))  # noqa: E501
            usd_price = Inquirer().find_usd_price(atoken)
            total_earned[atoken] += Balance(
                amount=unpaid_interest,
                usd_value=unpaid_interest * usd_price,
            )

        return interest_events, total_earned
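The unpaid interest computation at the end is easier to follow with illustrative numbers (assuming an 18-decimal aToken):

# principalBalanceOf(user) returns the principal in raw units, e.g. 100 * 10**18
# the current aDAI lending balance tracked by rotki is 101.2
# unpaid_interest = 101.2 - (100 * 10**18) / 10**18 = 1.2 aDAI
# which is then valued at the current aToken USD price via Inquirer().find_usd_price()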
Example No. 12
    def get_events_for_atoken_and_address(
        self,
        user_address: ChecksumEthAddress,
        atoken: EthereumToken,
        deposit_events: List[Dict[str, Any]],
        withdraw_events: List[Dict[str, Any]],
        from_block: int,
        to_block: int,
    ) -> List[AaveEvent]:
        """This function should be entered while holding the history_lock
        semaphore"""
        argument_filters = {
            'from': ZERO_ADDRESS,
            'to': user_address,
        }
        mint_events = self.ethereum.get_logs(
            contract_address=atoken.ethereum_address,
            abi=ATOKEN_ABI,
            event_name='Transfer',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=to_block,
        )
        mint_data = set()
        mint_data_to_log_index = {}
        for event in mint_events:
            amount = hex_or_bytes_to_int(event['data'])
            if amount == 0:
                continue  # first mint can be for 0. Ignore
            entry = (
                deserialize_blocknumber(event['blockNumber']),
                amount,
                self.ethereum.get_event_timestamp(event),
                event['transactionHash'],
            )
            mint_data.add(entry)
            mint_data_to_log_index[entry] = deserialize_int_from_hex_or_int(
                event['logIndex'],
                'aave log index',
            )

        reserve_asset = _atoken_to_reserve_asset(atoken)
        reserve_address, decimals = _get_reserve_address_decimals(
            reserve_asset.identifier)
        aave_events = []
        for event in deposit_events:
            if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
                # first 32 bytes of the data are the amount
                deposit = hex_or_bytes_to_int(event['data'][:66])
                block_number = deserialize_blocknumber(event['blockNumber'])
                timestamp = self.ethereum.get_event_timestamp(event)
                tx_hash = event['transactionHash']
                log_index = deserialize_int_from_hex_or_int(
                    event['logIndex'], 'aave log index')
                # If there is a corresponding deposit event remove the minting event data
                entry = (block_number, deposit, timestamp, tx_hash)
                if entry in mint_data:
                    mint_data.remove(entry)
                    del mint_data_to_log_index[entry]

                usd_price = query_usd_price_zero_if_error(
                    asset=reserve_asset,
                    time=timestamp,
                    location='aave deposit',
                    msg_aggregator=self.msg_aggregator,
                )
                deposit_amount = deposit / (FVal(10)**FVal(decimals))
                aave_events.append(
                    AaveEvent(
                        event_type='deposit',
                        asset=reserve_asset,
                        value=Balance(
                            amount=deposit_amount,
                            usd_value=deposit_amount * usd_price,
                        ),
                        block_number=block_number,
                        timestamp=timestamp,
                        tx_hash=tx_hash,
                        log_index=log_index,
                    ))

        for data in mint_data:
            usd_price = query_usd_price_zero_if_error(
                asset=atoken,
                time=data[2],
                location='aave interest profit',
                msg_aggregator=self.msg_aggregator,
            )
            interest_amount = data[1] / (FVal(10)**FVal(decimals))
            aave_events.append(
                AaveEvent(
                    event_type='interest',
                    asset=atoken,
                    value=Balance(
                        amount=interest_amount,
                        usd_value=interest_amount * usd_price,
                    ),
                    block_number=data[0],
                    timestamp=data[2],
                    tx_hash=data[3],
                    log_index=mint_data_to_log_index[data],
                ))

        for event in withdraw_events:
            if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
                # first 32 bytes of the data are the amount
                withdrawal = hex_or_bytes_to_int(event['data'][:66])
                block_number = deserialize_blocknumber(event['blockNumber'])
                timestamp = self.ethereum.get_event_timestamp(event)
                tx_hash = event['transactionHash']
                usd_price = query_usd_price_zero_if_error(
                    asset=reserve_asset,
                    time=timestamp,
                    location='aave withdrawal',
                    msg_aggregator=self.msg_aggregator,
                )
                withdrawal_amount = withdrawal / (FVal(10)**FVal(decimals))
                aave_events.append(
                    AaveEvent(
                        event_type='withdrawal',
                        asset=reserve_asset,
                        value=Balance(
                            amount=withdrawal_amount,
                            usd_value=withdrawal_amount * usd_price,
                        ),
                        block_number=block_number,
                        timestamp=timestamp,
                        tx_hash=tx_hash,
                        log_index=deserialize_int_from_hex_or_int(
                            event['logIndex'], 'aave log index'),
                    ))

        return aave_events
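The event['data'][:66] slices above work because non-indexed log arguments are ABI-encoded as 32-byte (64 hex character) words, so '0x' plus the first 64 characters isolates the amount word. A stdlib-only illustration:

raw = '0x' + (1_500 * 10**18).to_bytes(32, 'big').hex() + (0).to_bytes(32, 'big').hex()
assert int(raw[:66], 16) == 1_500 * 10**18  # same value hex_or_bytes_to_int() extracts above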
Example No. 13
File: eth2.py Project: step21/rotki
    def _get_eth2_staking_deposits_onchain(
        self,
        addresses: List[ChecksumEthAddress],
        msg_aggregator: MessagesAggregator,
        from_ts: Timestamp,
        to_ts: Timestamp,
    ) -> List[Eth2Deposit]:
        from_block = max(
            ETH2_DEPOSIT.deployed_block,
            self.ethereum.get_blocknumber_by_time(from_ts),
        )
        to_block = self.ethereum.get_blocknumber_by_time(to_ts)
        events = ETH2_DEPOSIT.get_logs(
            ethereum=self.ethereum,
            event_name='DepositEvent',
            argument_filters={},
            from_block=from_block,
            to_block=to_block,
        )
        transactions = self.ethereum.transactions.query(
            addresses=addresses,
            from_ts=from_ts,
            to_ts=to_ts,
            with_limit=False,
            recent_first=False,
        )
        deposits: List[Eth2Deposit] = []
        for transaction in transactions:
            if transaction.to_address != ETH2_DEPOSIT.address:
                continue

            tx_hash = '0x' + transaction.tx_hash.hex()
            for event in events:
                # Now find the corresponding event. If no event is found the transaction
                # probably failed or was something other than a deposit
                if event['transactionHash'] == tx_hash:
                    decoded_data = decode_event_data(event['data'], EVENT_ABI)
                    # all pylint ignores below due to https://github.com/PyCQA/pylint/issues/4114
                    amount = int.from_bytes(decoded_data[2],
                                            byteorder='little')  # pylint: disable=unsubscriptable-object  # noqa: E501
                    usd_price = query_usd_price_zero_if_error(
                        asset=A_ETH,
                        time=transaction.timestamp,
                        location='Eth2 staking query',
                        msg_aggregator=msg_aggregator,
                    )
                    normalized_amount = from_gwei(FVal(amount))
                    deposits.append(
                        Eth2Deposit(
                            from_address=transaction.from_address,
                            pubkey='0x' + decoded_data[0].hex(),  # pylint: disable=unsubscriptable-object  # noqa: E501
                            withdrawal_credentials='0x' +
                            decoded_data[1].hex(),  # pylint: disable=unsubscriptable-object  # noqa: E501
                            value=Balance(normalized_amount,
                                          usd_price * normalized_amount),
                            deposit_index=int.from_bytes(decoded_data[4],
                                                         byteorder='little'),  # pylint: disable=unsubscriptable-object  # noqa: E501
                            tx_hash=tx_hash,
                            log_index=event['logIndex'],
                            timestamp=Timestamp(transaction.timestamp),
                        ))
                    break

        return deposits
Example No. 14
def _scrape_validator_daily_stats(
    validator_index: int,
    last_known_timestamp: Timestamp,
    msg_aggregator: MessagesAggregator,
) -> List[ValidatorDailyStats]:
    """Scrapes the website of beaconcha.in and parses the data directly out of the data table.

    The parser is very simple and can break if they change the way the data
    is displayed at https://beaconcha.in/validator/33710/stats. If that happens
    we need to adjust here. If we could also somehow get the data programmatically
    as CSV, that would be swell.

    May raise:
    - RemoteError if we can't query beaconcha.in or if the data is not in the expected format
    """
    url = f'https://beaconcha.in/validator/{validator_index}/stats'
    log.debug(f'Querying beaconchain stats: {url}')
    try:
        response = requests.get(url)
    except requests.exceptions.RequestException as e:
        raise RemoteError(
            f'Beaconcha.in api request {url} failed due to {str(e)}') from e

    if response.status_code != 200:
        raise RemoteError(
            f'Beaconcha.in api request {url} failed with code: {response.status_code}'
            f' and response: {response.text}', )

    soup = BeautifulSoup(response.text,
                         'html.parser',
                         parse_only=SoupStrainer('tbod'))
    if soup is None:
        raise RemoteError(
            'Could not find <tbod> while parsing beaconcha.in stats page')
    try:
        tr = soup.tbod.tr
    except AttributeError as e:
        raise RemoteError(
            'Could not find first <tr> while parsing beaconcha.in stats page'
        ) from e

    timestamp = Timestamp(0)
    pnl = ZERO
    start_amount = ZERO
    end_amount = ZERO
    missed_attestations = 0
    orphaned_attestations = 0
    proposed_blocks = 0
    missed_blocks = 0
    orphaned_blocks = 0
    included_attester_slashings = 0
    proposer_attester_slashings = 0
    deposits_number = 0
    amount_deposited = ZERO
    column_pos = 1
    stats: List[ValidatorDailyStats] = []
    while tr is not None:

        for column in tr.children:
            if column.name != 'td':
                continue

            if column_pos == 1:  # date
                date = column.string
                try:
                    timestamp = create_timestamp(date, formatstr='%d %b %Y')
                except ValueError as e:
                    raise RemoteError(
                        f'Failed to parse {date} to timestamp') from e

                if timestamp <= last_known_timestamp:
                    return stats  # we are done

                column_pos += 1
            elif column_pos == 2:
                pnl = _parse_fval(column.string, 'income')
                column_pos += 1
            elif column_pos == 3:
                start_amount = _parse_fval(column.string, 'start amount')
                column_pos += 1
            elif column_pos == 4:
                end_amount = _parse_fval(column.string, 'end amount')
                column_pos += 1
            elif column_pos == 5:
                missed_attestations = _parse_int(column.string,
                                                 'missed attestations')
                column_pos += 1
            elif column_pos == 6:
                orphaned_attestations = _parse_int(column.string,
                                                   'orphaned attestations')
                column_pos += 1
            elif column_pos == 7:
                proposed_blocks = _parse_int(column.string, 'proposed blocks')
                column_pos += 1
            elif column_pos == 8:
                missed_blocks = _parse_int(column.string, 'missed blocks')
                column_pos += 1
            elif column_pos == 9:
                orphaned_blocks = _parse_int(column.string, 'orphaned blocks')
                column_pos += 1
            elif column_pos == 10:
                included_attester_slashings = _parse_int(
                    column.string, 'included attester slashings')  # noqa: E501
                column_pos += 1
            elif column_pos == 11:
                proposer_attester_slashings = _parse_int(
                    column.string, 'proposer attester slashings')  # noqa: E501
                column_pos += 1
            elif column_pos == 12:
                deposits_number = _parse_int(column.string, 'deposits number')
                column_pos += 1
            elif column_pos == 13:
                amount_deposited = _parse_fval(column.string,
                                               'amount deposited')
                column_pos += 1

        column_pos = 1
        prices = [
            query_usd_price_zero_if_error(
                A_ETH,
                time=time,
                location='eth2 staking daily stats',
                msg_aggregator=msg_aggregator,
            ) for time in (timestamp, Timestamp(timestamp + DAY_IN_SECONDS))
        ]
        stats.append(
            ValidatorDailyStats(
                validator_index=validator_index,
                timestamp=timestamp,
                start_usd_price=prices[0],
                end_usd_price=prices[1],
                pnl=pnl,
                start_amount=start_amount,
                end_amount=end_amount,
                missed_attestations=missed_attestations,
                orphaned_attestations=orphaned_attestations,
                proposed_blocks=proposed_blocks,
                missed_blocks=missed_blocks,
                orphaned_blocks=orphaned_blocks,
                included_attester_slashings=included_attester_slashings,
                proposer_attester_slashings=proposer_attester_slashings,
                deposits_number=deposits_number,
                amount_deposited=amount_deposited,
            ))
        tr = tr.find_next_sibling()

    return stats
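For reference, plausible sketches of the two parsing helpers used above; the actual rotki implementations may strip different unit suffixes or raise different errors.

def _parse_fval(line: str, entry: str) -> FVal:
    """Parse a beaconcha.in stats cell such as '0.00361 ETH' into an FVal"""
    try:
        return FVal(line.replace('ETH', ''))
    except ValueError as e:
        raise RemoteError(f'Could not parse {line} as a number for {entry}') from e


def _parse_int(line: str, entry: str) -> int:
    """Parse a beaconcha.in stats cell into an int, treating '-' as zero"""
    try:
        return 0 if line == '-' else int(line)
    except ValueError as e:
        raise RemoteError(f'Could not parse {line} as an integer for {entry}') from e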
Example No. 15
    def _consume_grant_entry(self, entry: Dict[str, Any]) -> Optional[LedgerAction]:
        """
        Consumes a grant entry from the CSV and turns it into a LedgerAction

        May raise:

        - DeserializationError
        - KeyError
        - UnknownAsset
        """
        if entry['Type'] != 'grant':
            return None

        timestamp = deserialize_timestamp_from_date(
            date=entry['date'],
            formatstr='%Y-%m-%dT%H:%M:%S',
            location='Gitcoin CSV',
            skip_milliseconds=True,
        )
        usd_value = deserialize_asset_amount(entry['Value In USD'])

        asset = get_asset_by_symbol(entry['token_name'])
        if asset is None:
            raise UnknownAsset(entry['token_name'])
        token_amount = deserialize_asset_amount(entry['token_value'])

        if token_amount == ZERO:  # try to make up for https://github.com/gitcoinco/web/issues/9213
            price = query_usd_price_zero_if_error(
                asset=asset,
                time=timestamp,
                location=f'Gitcoin CSV entry {entry["txid"]}',
                msg_aggregator=self.db.msg_aggregator,
            )
            if price == ZERO:
                self.db.msg_aggregator.add_warning(
                    f'Could not process gitcoin grant entry at {entry["date"]} for {asset.symbol} '
                    f'due to amount being zero and inability to find price. Skipping.',
                )
                return None
            # calculate the amount from price and value
            token_amount = usd_value / price  # type: ignore

        match = self.grantid_re.search(entry['url'])
        if match is None:
            self.db.msg_aggregator.add_warning(
                f'Could not process gitcoin grant entry at {entry["date"]} for {asset.symbol} '
                f'due to inability to read grant id. Skipping.', )
            return None

        grant_id = int(match.group(1))
        rate = Price(usd_value / token_amount)

        raw_txid = entry['txid']
        tx_type, tx_id = process_gitcoin_txid(key='txid', entry=entry)
        return LedgerAction(
            identifier=1,  # whatever does not go in the DB
            timestamp=timestamp,
            action_type=LedgerActionType.DONATION_RECEIVED,
            location=Location.GITCOIN,
            amount=token_amount,
            asset=asset,
            rate=rate,
            rate_asset=A_USD,  # let's use the rate gitcoin calculated
            link=raw_txid,
            notes=f'Gitcoin grant {grant_id} event',
            extra_data=GitcoinEventData(
                tx_id=tx_id,
                grant_id=grant_id,
                clr_round=None,  # can't get round from CSV
                tx_type=tx_type,
            ),
        )
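A short worked example of the zero-amount fallback above, with illustrative numbers:

# the CSV row reports Value In USD = 45 but token_value = 0 (gitcoinco/web#9213)
# the historical price lookup returns 1.5 USD per token, so
# token_amount = 45 / 1.5 = 30 and rate = 45 / 30 = 1.5 USD per token,
# which is what ends up on the LedgerAction with rate_asset=A_USD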