def _process_trove_events(
        self,
        changes: List[Dict[str, Any]],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
) -> List[DefiEvent]:
    """Turn raw Liquity trove changes into DefiEvents within a time window.

    Walks the given trove `changes`, tracking the running LUSD debt of the
    trove (`total_lusd_trove_balance`) and any realized LUSD loss so far
    (`realized_trove_lusd_loss`) in order to attach PnL to debt-payback and
    liquidation events.

    NOTE(review): the `break` on `timestamp > to_timestamp` assumes `changes`
    is sorted by transaction timestamp ascending — TODO confirm with the caller.

    Malformed entries (DeserializationError/KeyError) are skipped with a
    warning rather than aborting the whole processing.
    """
    events = []
    total_lusd_trove_balance = Balance()
    realized_trove_lusd_loss = Balance()
    for change in changes:
        try:
            operation = TroveOperation.deserialize(change['troveOperation'])
            collateral_change = deserialize_asset_amount(change['collateralChange'])
            debt_change = deserialize_asset_amount(change['debtChange'])
            timestamp = change['transaction']['timestamp']
            if timestamp < from_timestamp:
                continue
            if timestamp > to_timestamp:
                break  # see docstring: relies on changes being time-sorted

            got_asset: Optional[Asset]
            spent_asset: Optional[Asset]
            pnl = got_asset = got_balance = spent_asset = spent_balance = None
            count_spent_got_cost_basis = False
            # In one transaction it is possible to generate debt and change the collateral
            if debt_change != AssetAmount(ZERO):
                if debt_change > ZERO:
                    # Generate debt: positive debt change means new LUSD was minted to the user
                    count_spent_got_cost_basis = True
                    got_asset = A_LUSD
                    got_balance = Balance(
                        amount=debt_change,
                        usd_value=query_usd_price_or_use_default(
                            asset=A_LUSD,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                    total_lusd_trove_balance += got_balance
                else:  # payback debt
                    count_spent_got_cost_basis = True
                    spent_asset = A_LUSD
                    spent_balance = Balance(
                        amount=abs(debt_change),
                        usd_value=query_usd_price_or_use_default(
                            asset=A_LUSD,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                    total_lusd_trove_balance -= spent_balance
                    # If paying back more than was tracked as generated, the
                    # difference (negative) is treated as realized LUSD loss
                    balance = total_lusd_trove_balance.amount + realized_trove_lusd_loss.amount
                    if balance < ZERO:
                        pnl_balance = total_lusd_trove_balance + realized_trove_lusd_loss
                        realized_trove_lusd_loss += -pnl_balance
                        pnl = [AssetBalance(asset=A_LUSD, balance=pnl_balance)]

            if collateral_change != AssetAmount(ZERO):
                if collateral_change < ZERO:
                    # Withdraw collateral: negative collateral change returns ETH to the user
                    got_asset = A_ETH
                    got_balance = Balance(
                        amount=abs(collateral_change),
                        usd_value=query_usd_price_or_use_default(
                            asset=A_ETH,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                else:  # Deposit collateral
                    spent_asset = A_ETH
                    spent_balance = Balance(
                        amount=collateral_change,
                        usd_value=query_usd_price_or_use_default(
                            asset=A_ETH,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )

            if operation in (
                TroveOperation.LIQUIDATEINNORMALMODE,
                TroveOperation.LIQUIDATEINRECOVERYMODE,
            ):
                # Liquidation: the whole collateral is spent and counted as
                # negative ETH PnL. NOTE(review): this overwrites any
                # spent/pnl values set by the branches above for the same
                # change — presumably intended since a liquidation dominates
                # the event; TODO confirm.
                count_spent_got_cost_basis = True
                spent_asset = A_ETH
                spent_balance = Balance(
                    amount=abs(collateral_change),
                    usd_value=query_usd_price_or_use_default(
                        asset=A_ETH,
                        time=timestamp,
                        default_value=ZERO,
                        location='Liquity',
                    ),
                )
                pnl = [AssetBalance(asset=A_ETH, balance=-spent_balance)]
            event = DefiEvent(
                timestamp=Timestamp(change['transaction']['timestamp']),
                wrapped_event=change,
                event_type=DefiEventType.LIQUITY,
                got_asset=got_asset,
                got_balance=got_balance,
                spent_asset=spent_asset,
                spent_balance=spent_balance,
                pnl=pnl,
                count_spent_got_cost_basis=count_spent_got_cost_basis,
                tx_hash=change['transaction']['id'],
            )
            events.append(event)
        except (DeserializationError, KeyError) as e:
            # Best effort: skip the single malformed change and keep going
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'Missing key entry for {msg}.'
            log.debug(f'Failed to extract defievent in Liquity from {change}')
            self.msg_aggregator.add_warning(
                f'Ignoring Liquity Trove event in Liquity. '
                f'Failed to decode remote information. {msg}.',
            )
            continue

    return events
def _query_vault_details(
        self,
        vault: MakerDAOVault,
        proxy: ChecksumEthAddress,
        urn: ChecksumEthAddress,
) -> Optional[MakerDAOVaultDetails]:
    """Query on-chain event history for a single MakerDAO vault.

    Gathers, in order: the CDP creation event, collateral deposits and
    withdrawals (via the collateral's gemjoin adapter), DAI debt generation
    and payback (via vat/dai-join), and liquidations (Cat `Bite` events).
    Builds a time-sorted list of VaultEvents plus aggregate figures and
    returns them as MakerDAOVaultDetails.

    Returns None when no creation event is found or the vault's collateral
    type has no known gemjoin adapter.
    """
    # They can raise:
    # ConversionError due to hex_or_bytes_to_address, hexstr_to_int
    # RemoteError due to external query errors
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CDP_MANAGER.address,
        abi=MAKERDAO_CDP_MANAGER.abi,
        event_name='NewCdp',
        argument_filters={'cdp': vault.identifier},
        from_block=MAKERDAO_CDP_MANAGER.deployed_block,
    )
    if len(events) == 0:
        self.msg_aggregator.add_error(
            'No events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
        return None
    if len(events) != 1:
        # More than one creation event: log loudly but proceed with the first
        log.error(
            f'Multiple events found for a Vault creation: {events}. Taking '
            f'only the first. This should not happen. Something is wrong',
        )
        self.msg_aggregator.add_error(
            'Multiple events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
    creation_ts = self.ethereum.get_event_timestamp(events[0])

    # get vat frob events for cross-checking
    argument_filters = {
        'sig': '0x76088703',  # frob
        'arg1': '0x' + vault.ilk.hex(),  # ilk
        'arg2': address_to_bytes32(urn),  # urn
        # arg3 can be urn for the 1st deposit, and proxy/owner for the next ones
        # so don't filter for it
        # 'arg3': address_to_bytes32(proxy),  # proxy - owner
    }
    frob_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    # Deposits/withdrawals are only counted if a matching frob tx exists
    frob_event_tx_hashes = [x['transactionHash'] for x in frob_events]

    gemjoin = GEMJOIN_MAPPING.get(vault.collateral_type, None)
    if gemjoin is None:
        self.msg_aggregator.add_warning(
            f'Unknown makerdao vault collateral type detected {vault.collateral_type}.'
            'Skipping ...',
        )
        return None

    vault_events = []
    # Get the collateral deposit events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        # In cases where a CDP has been migrated from a SAI CDP to a DAI
        # Vault the usr in the first deposit will be the old address. To
        # detect the first deposit in these cases we need to check for
        # arg1 being the urn
        # 'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    # all subsequent deposits should have the proxy as a usr
    # but for non-migrated CDPS the previous query would also work
    # so in those cases we will have the first deposit 2 times
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
    }
    events.extend(
        self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        ))
    deposit_tx_hashes = set()
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash in deposit_tx_hashes:
            # Skip duplicate deposit that would be detected in non migrated CDP case
            continue

        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue

        deposit_tx_hashes.add(tx_hash)
        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral deposit',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.DEPOSIT_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=tx_hash,
            ))

    # Get the collateral withdrawal events
    argument_filters = {
        'sig': '0xef693bed',  # exit
        'usr': proxy,
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue

        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral withdrawal',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.WITHDRAW_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Running net debt in wei; used at the end to derive interest owed
    total_dai_wei = 0
    # Get the dai generation events
    argument_filters = {
        'sig': '0xbb35783b',  # move
        'arg1': address_to_bytes32(urn),
        # For CDPs that were created by migrating from SAI the first DAI generation
        # during vault creation will have the old owner as arg2. So we can't
        # filter for it here. Still seems like the urn as arg1 is sufficient
        # 'arg2': address_to_bytes32(proxy),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    for event in events:
        # vat amounts are in RAY precision; shift down to wei
        given_amount = _shift_num_right_by(hexstr_to_int(event['topics'][3]), RAY_DIGITS)
        total_dai_wei += given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),  # DAI assumed ~1 USD when price lookup fails
            location='vault debt generation',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.GENERATE_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the dai payback events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_DAI_JOIN.address,
        abi=MAKERDAO_DAI_JOIN.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_DAI_JOIN.deployed_block,
    )
    for event in events:
        given_amount = hexstr_to_int(event['topics'][3])
        total_dai_wei -= given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        if amount == ZERO:
            # it seems there is a zero DAI value transfer from the urn when
            # withdrawing ETH. So we should ignore these as events
            continue

        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt payback',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.PAYBACK_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the liquidation events
    argument_filters = {'urn': urn}
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CAT.address,
        abi=MAKERDAO_CAT.abi,
        event_name='Bite',
        argument_filters=argument_filters,
        from_block=MAKERDAO_CAT.deployed_block,
    )
    sum_liquidation_amount = ZERO
    sum_liquidation_usd = ZERO
    for event in events:
        # The `lot` is the first word of the event data; slice length differs
        # depending on whether the node returned hex string or raw bytes
        if isinstance(event['data'], str):
            lot = event['data'][:66]
        else:  # bytes
            lot = event['data'][:32]
        amount = asset_normalized_value(
            amount=hexstr_to_int(lot),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        sum_liquidation_amount += amount
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral liquidation',
        )
        amount_usd_value = amount * usd_price
        sum_liquidation_usd += amount_usd_value
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.LIQUIDATION,
                value=Balance(amount, amount_usd_value),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Interest owed = current debt minus net generated-minus-paid-back DAI
    total_interest_owed = vault.debt.amount - token_normalized_value(
        token_amount=total_dai_wei,
        token=A_DAI,
    )
    # sort vault events by timestamp
    vault_events.sort(key=lambda event: event.timestamp)

    return MakerDAOVaultDetails(
        identifier=vault.identifier,
        collateral_asset=vault.collateral_asset,
        total_interest_owed=total_interest_owed,
        creation_ts=creation_ts,
        total_liquidated=Balance(sum_liquidation_amount, sum_liquidation_usd),
        events=vault_events,
    )
def _historical_dsr_for_account(
        self,
        account: ChecksumEthAddress,
        proxy: ChecksumEthAddress,
) -> DSRAccountReport:
    """Creates a historical DSR report for a single account

    Queries MakerDAO Pot join/exit events for the account's proxy, resolves
    each movement's DAI value via the corresponding vat.move event, then
    replays the movements in block order to compute the gain accrued so far
    at each step (using the implied chi rate) and the overall current gain.

    May raise:
    - RemoteError if etherscan is used and there is a problem with reaching it
    or with the returned result.
    - BlockchainQueryError if an ethereum node is used and the contract call
    queries fail for some reason
    """
    movements = []
    # NOTE(review): these two lists are populated but not read within this
    # method — presumably kept for debugging; verify before removing.
    join_normalized_balances = []
    exit_normalized_balances = []
    argument_filters = {
        'sig': '0x049878f3',  # join
        'usr': proxy,
    }
    join_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_POT.address,
        abi=MAKERDAO_POT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_POT.deployed_block,
    )
    for join_event in join_events:
        try:
            # wad: the normalized (chi-divided) balance added by this join
            wad_val = hexstr_to_int(join_event['topics'][2])
        except DeserializationError as e:
            msg = f'Error at reading DSR join event topics. {str(e)}. Skipping event...'
            self.msg_aggregator.add_error(msg)
            continue
        join_normalized_balances.append(wad_val)

        # and now get the deposit amount
        block_number = join_event['blockNumber']
        dai_value = self._get_vat_join_exit_at_transaction(
            movement_type='join',
            proxy_address=proxy,
            block_number=block_number,
            transaction_index=join_event['transactionIndex'],
        )
        if dai_value is None:
            self.msg_aggregator.add_error(
                'Did not find corresponding vat.move event for pot join. '
                'Skipping ...',
            )
            continue

        timestamp = self.ethereum.get_event_timestamp(join_event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),  # DAI assumed ~1 USD when price lookup fails
            location='DSR deposit',
        )
        movements.append(
            DSRMovement(
                movement_type='deposit',
                address=account,
                normalized_balance=wad_val,
                amount=dai_value,
                amount_usd_value=_dsrdai_to_dai(dai_value) * usd_price,
                block_number=join_event['blockNumber'],
                timestamp=timestamp,
                tx_hash=join_event['transactionHash'],
            ),
        )

    argument_filters = {
        'sig': '0x7f8661a1',  # exit
        'usr': proxy,
    }
    exit_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_POT.address,
        abi=MAKERDAO_POT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_POT.deployed_block,
    )
    for exit_event in exit_events:
        try:
            wad_val = hexstr_to_int(exit_event['topics'][2])
        except DeserializationError as e:
            msg = f'Error at reading DSR exit event topics. {str(e)}. Skipping event...'
            self.msg_aggregator.add_error(msg)
            continue
        exit_normalized_balances.append(wad_val)

        block_number = exit_event['blockNumber']
        # and now get the withdrawal amount
        dai_value = self._get_vat_join_exit_at_transaction(
            movement_type='exit',
            proxy_address=proxy,
            block_number=block_number,
            transaction_index=exit_event['transactionIndex'],
        )
        if dai_value is None:
            self.msg_aggregator.add_error(
                'Did not find corresponding vat.move event for pot exit. '
                'Skipping ...',
            )
            continue

        timestamp = self.ethereum.get_event_timestamp(exit_event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='DSR withdrawal',
        )
        movements.append(
            DSRMovement(
                movement_type='withdrawal',
                address=account,
                normalized_balance=wad_val,
                amount=dai_value,
                amount_usd_value=_dsrdai_to_dai(dai_value) * usd_price,
                block_number=exit_event['blockNumber'],
                timestamp=timestamp,
                tx_hash=exit_event['transactionHash'],
            ),
        )

    # Replay movements in chronological (block) order, tracking the running
    # normalized balance and the DAI principal currently in the DSR, to
    # attribute gain-so-far to each movement.
    normalized_balance = 0
    amount_in_dsr = 0
    movements.sort(key=lambda x: x.block_number)

    for idx, m in enumerate(movements):
        if m.normalized_balance == 0:
            # skip 0 amount/balance movements. Consider last gain as last gain so far.
            if idx == 0:
                m.gain_so_far = 0
                m.gain_so_far_usd_value = ZERO
            else:
                m.gain_so_far = movements[idx - 1].gain_so_far
                m.gain_so_far_usd_value = movements[idx - 1].gain_so_far_usd_value
            continue

        if normalized_balance == m.normalized_balance:
            # Full withdrawal/match: gain is simply amount out minus principal in
            m.gain_so_far = m.amount - amount_in_dsr
        else:
            # Derive the chi rate implied by this movement and apply it to the
            # whole tracked normalized balance to get accrued value
            current_chi = FVal(m.amount) / FVal(m.normalized_balance)
            gain_so_far = normalized_balance * current_chi - amount_in_dsr
            m.gain_so_far = gain_so_far.to_int(exact=False)

        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=m.timestamp,
            default_value=FVal(1),
            location='DSR movement',
        )
        m.gain_so_far_usd_value = _dsrdai_to_dai(m.gain_so_far) * usd_price
        if m.movement_type == 'deposit':
            normalized_balance += m.normalized_balance
            amount_in_dsr += m.amount
        else:  # withdrawal
            amount_in_dsr -= m.amount
            normalized_balance -= m.normalized_balance

    # Current total gain: remaining normalized balance valued at the live chi
    # minus the principal still in the DSR
    chi = MAKERDAO_POT.call(self.ethereum, 'chi')
    normalized_balance = normalized_balance * chi
    gain = normalized_balance - amount_in_dsr
    try:
        current_dai_price = Inquirer().find_usd_price(A_DAI)
    except RemoteError:
        current_dai_price = Price(FVal(1))  # fall back to 1 USD peg

    # Calculate the total gain so far in USD
    unaccounted_gain = _dsrdai_to_dai(gain)
    last_usd_value = ZERO
    last_dai_gain = 0
    if len(movements) != 0:
        # Value only the gain accrued since the last movement at today's
        # price; earlier gain keeps the USD value computed at movement time
        last_usd_value = movements[-1].gain_so_far_usd_value
        last_dai_gain = movements[-1].gain_so_far
        unaccounted_gain = _dsrdai_to_dai(gain - last_dai_gain)
    gain_so_far_usd_value = unaccounted_gain * current_dai_price + last_usd_value

    return DSRAccountReport(
        movements=movements,
        gain_so_far=gain,
        gain_so_far_usd_value=gain_so_far_usd_value,
    )