def _get_comp_events(
        self,
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> List[CompoundEvent]:
    """Query COMP reward claims for the given address in the given time range.

    Finds all COMP token Transfer events from the comptroller proxy to the
    address and turns each one into a CompoundEvent of type 'comp', with the
    full claimed value counted as realized PnL.

    May raise whatever self.ethereum.get_logs() and
    self.ethereum.get_blocknumber_by_time() can raise (e.g. RemoteError).
    """
    # Fix: the original issued an extra get_blocknumber_by_time(from_ts) call
    # whose result was thrown away -- each call is a remote query, so do it once
    from_block = max(
        COMP_DEPLOYED_BLOCK,
        self.ethereum.get_blocknumber_by_time(from_ts),
    )
    argument_filters = {
        'from': COMPTROLLER_PROXY.address,
        'to': address,
    }
    comp_events = self.ethereum.get_logs(
        contract_address=A_COMP.ethereum_address,
        abi=ERC20TOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=self.ethereum.get_blocknumber_by_time(to_ts),
    )

    events = []
    for event in comp_events:
        timestamp = self.ethereum.get_event_timestamp(event)
        tx_hash = event['transactionHash']
        # Transfer value is the full event data word
        amount = token_normalized_value(hexstr_to_int(event['data']), A_COMP)
        usd_price = query_usd_price_zero_if_error(
            asset=A_COMP,
            time=timestamp,
            location=f'comp_claim {tx_hash}',
            msg_aggregator=self.msg_aggregator,
        )
        value = Balance(amount, amount * usd_price)
        events.append(CompoundEvent(
            event_type='comp',
            address=address,
            block_number=event['blockNumber'],
            timestamp=timestamp,
            asset=A_COMP,
            value=value,
            to_asset=None,
            to_value=None,
            realized_pnl=value,  # a reward claim is pure profit
            tx_hash=tx_hash,
            log_index=event['logIndex'],
        ))

    return events
def _get_vat_join_exit_at_transaction(
        self,
        movement_type: Literal['join', 'exit'],
        proxy_address: ChecksumEthAddress,
        block_number: int,
        transaction_index: int,
) -> Optional[int]:
    """Returns values in DSR DAI that were deposited/withdrawn at a block number and tx index

    DSR DAI values have a lot more digits than normal DAI and they need to be
    divided by RAD (10**45) in order to get real DAI value. Keeping it like
    that since most calculations deal with RAD precision in DSR.

    Returns None if no value was found or if there was an error with conversion.

    May raise:
    - RemoteError if etherscan is used and there is a problem with reaching it
    or with the returned result.
    - BlockchainQueryError if an ethereum node is used and the contract call
    queries fail for some reason
    """
    argument_filters = {
        'sig': '0x3b4da69f' if movement_type == 'join' else '0xef693bed',
        'usr': proxy_address,
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_DAI_JOIN.address,
        abi=MAKERDAO_DAI_JOIN.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=block_number,
        to_block=block_number,
    )
    value = None
    for event in events:
        if event['transactionIndex'] == transaction_index:
            if value is not None:
                log.error(  # type: ignore
                    'Mistaken assumption: There is multiple vat.move events for '
                    'the same transaction',
                )
            try:
                value = hexstr_to_int(event['topics'][3])
                break
            except ConversionError:
                value = None

    if value is None:
        # Fix: honor the documented contract. The previous code did
        # `value * RAY` unconditionally, raising TypeError when no matching
        # event was found or the topic conversion failed.
        return None
    return value * RAY  # turn it from DAI to RAD
def _query_vault_details(
        self,
        vault: MakerDAOVault,
        proxy: ChecksumEthAddress,
        urn: ChecksumEthAddress,
) -> Optional[MakerDAOVaultDetails]:
    """Query all on-chain events of a single MakerDAO vault and build its details.

    Gathers the vault creation timestamp, collateral deposits/withdrawals,
    DAI debt generation/payback and liquidations, and returns them bundled
    in a MakerDAOVaultDetails. Returns None if the vault creation event or
    the collateral join contract cannot be determined.
    """
    # They can raise:
    # ConversionError due to hex_or_bytes_to_address, hexstr_to_int
    # RemoteError due to external query errors
    # The NewCdp event of the CDP manager marks the vault's creation
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CDP_MANAGER.address,
        abi=MAKERDAO_CDP_MANAGER.abi,
        event_name='NewCdp',
        argument_filters={'cdp': vault.identifier},
        from_block=MAKERDAO_CDP_MANAGER.deployed_block,
    )
    if len(events) == 0:
        self.msg_aggregator.add_error(
            'No events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
        return None
    if len(events) != 1:
        log.error(
            f'Multiple events found for a Vault creation: {events}. Taking '
            f'only the first. This should not happen. Something is wrong',
        )
        self.msg_aggregator.add_error(
            'Multiple events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
    creation_ts = self.ethereum.get_event_timestamp(events[0])

    # get vat frob events for cross-checking
    argument_filters = {
        'sig': '0x76088703',  # frob
        'arg1': '0x' + vault.ilk.hex(),  # ilk
        'arg2': address_to_bytes32(urn),  # urn
        # arg3 can be urn for the 1st deposit, and proxy/owner for the next ones
        # so don't filter for it
        # 'arg3': address_to_bytes32(proxy),  # proxy - owner
    }
    frob_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    # tx hashes of the vat frob events; join/exit logs without a matching
    # frob event in the same tx are filtered out below
    # NOTE(review): this is a list; a set would make the repeated
    # `tx_hash not in frob_event_tx_hashes` membership checks O(1)
    frob_event_tx_hashes = [x['transactionHash'] for x in frob_events]

    gemjoin = GEMJOIN_MAPPING.get(vault.collateral_type, None)
    if gemjoin is None:
        self.msg_aggregator.add_warning(
            f'Unknown makerdao vault collateral type detected {vault.collateral_type}.'
            'Skipping ...',
        )
        return None

    vault_events = []
    # Get the collateral deposit events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        # In cases where a CDP has been migrated from a SAI CDP to a DAI
        # Vault the usr in the first deposit will be the old address. To
        # detect the first deposit in these cases we need to check for
        # arg1 being the urn
        # 'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    # all subsequent deposits should have the proxy as a usr
    # but for non-migrated CDPS the previous query would also work
    # so in those cases we will have the first deposit 2 times
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
    }
    events.extend(
        self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        ))
    deposit_tx_hashes = set()  # deduplicates deposits seen by both queries above
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash in deposit_tx_hashes:
            # Skip duplicate deposit that would be detected in non migrated CDP case
            continue
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue
        deposit_tx_hashes.add(tx_hash)
        # presumably topics[3] carries the deposited wad amount -- matches
        # the exit/withdrawal handling below; TODO confirm against LogNote ABI
        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral deposit',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.DEPOSIT_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=tx_hash,
            ))

    # Get the collateral withdrawal events
    argument_filters = {
        'sig': '0xef693bed',  # exit
        'usr': proxy,
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue
        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral withdrawal',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.WITHDRAW_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Running net debt in wei: generation adds, payback subtracts; used at the
    # end to derive the interest owed
    total_dai_wei = 0
    # Get the dai generation events
    argument_filters = {
        'sig': '0xbb35783b',  # move
        'arg1': address_to_bytes32(urn),
        # For CDPs that were created by migrating from SAI the first DAI generation
        # during vault creation will have the old owner as arg2. So we can't
        # filter for it here. Still seems like the urn as arg1 is sufficient
        # 'arg2': address_to_bytes32(proxy),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    for event in events:
        # vat.move amounts are in RAD precision; drop RAY_DIGITS to get wei
        given_amount = _shift_num_right_by(
            hexstr_to_int(event['topics'][3]),
            RAY_DIGITS,
        )
        total_dai_wei += given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt generation',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.GENERATE_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the dai payback events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_DAI_JOIN.address,
        abi=MAKERDAO_DAI_JOIN.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_DAI_JOIN.deployed_block,
    )
    for event in events:
        given_amount = hexstr_to_int(event['topics'][3])
        total_dai_wei -= given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        if amount == ZERO:
            # it seems there is a zero DAI value transfer from the urn when
            # withdrawing ETH. So we should ignore these as events
            continue
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt payback',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.PAYBACK_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the liquidation events
    argument_filters = {'urn': urn}
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CAT.address,
        abi=MAKERDAO_CAT.abi,
        event_name='Bite',
        argument_filters=argument_filters,
        from_block=MAKERDAO_CAT.deployed_block,
    )
    sum_liquidation_amount = ZERO
    sum_liquidation_usd = ZERO
    for event in events:
        # The liquidated lot is the first word of the event data.
        # For hex-string data that is 66 chars ('0x' + 64 hex digits);
        # for raw bytes it is the first 32 bytes.
        if isinstance(event['data'], str):
            lot = event['data'][:66]
        else:  # bytes
            lot = event['data'][:32]
        amount = asset_normalized_value(
            amount=hexstr_to_int(lot),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        sum_liquidation_amount += amount
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral liquidation',
        )
        amount_usd_value = amount * usd_price
        sum_liquidation_usd += amount_usd_value
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.LIQUIDATION,
                value=Balance(amount, amount_usd_value),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Current debt minus net generated debt is what has accrued as interest
    total_interest_owed = vault.debt.amount - token_normalized_value(
        token_amount=total_dai_wei,
        token=A_DAI,
    )
    # sort vault events by timestamp
    vault_events.sort(key=lambda event: event.timestamp)

    return MakerDAOVaultDetails(
        identifier=vault.identifier,
        collateral_asset=vault.collateral_asset,
        total_interest_owed=total_interest_owed,
        creation_ts=creation_ts,
        total_liquidated=Balance(sum_liquidation_amount, sum_liquidation_usd),
        events=vault_events,
    )
def _get_vault_withdraw_events(
        self,
        vault: YearnVault,
        address: ChecksumEthAddress,
        from_block: int,
        to_block: int,
) -> List[YearnVaultEvent]:
    """Get all withdraw events of the underlying token to the vault

    For each underlying-token Transfer from the vault contract to the address,
    the transaction receipt is scanned for the matching vault-share burn
    (a Transfer of vault tokens from the address to the zero address) so the
    event can record both the shares burned and the tokens received.
    """
    events: List[YearnVaultEvent] = []
    argument_filters = {'from': vault.contract.address, 'to': address}
    withdraw_events = self.ethereum.get_logs(
        contract_address=vault.underlying_token.ethereum_address,
        abi=ERC20TOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=to_block,
    )
    for withdraw_event in withdraw_events:
        timestamp = self.ethereum.get_event_timestamp(withdraw_event)
        withdraw_amount = token_normalized_value(
            token_amount=hexstr_to_int(withdraw_event['data']),
            token=vault.token,
        )
        tx_hash = withdraw_event['transactionHash']
        tx_receipt = self.ethereum.get_transaction_receipt(tx_hash)
        withdraw_index = withdraw_event['logIndex']
        burn_amount = None
        # NOTE(review): the loop variable 'log' shadows any module-level
        # logger named 'log'; and with no break, a later matching burn log
        # would overwrite an earlier one -- presumably at most one burn per
        # tx is expected. TODO confirm
        for log in tx_receipt['logs']:
            found_event = (
                # keccak of Transfer(address,address,uint256)
                log['topics'][0] == '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' and  # noqa: E501
                log['topics'][1] == address_to_bytes32(address) and
                log['topics'][2] == address_to_bytes32(ZERO_ADDRESS)
            )
            if found_event:
                # found the burn log
                burn_amount = token_normalized_value(
                    token_amount=hexstr_to_int(log['data']),
                    token=vault.token,
                )

        if burn_amount is None:
            self.msg_aggregator.add_error(
                f'Ignoring yearn withdraw event with tx_hash {tx_hash} and log index '
                f'{withdraw_index} due to inability to find corresponding burn event',
            )
            continue

        withdraw_usd_price = get_usd_price_zero_if_error(
            asset=vault.underlying_token,
            time=timestamp,
            location='yearn vault withdraw',
            msg_aggregator=self.msg_aggregator,
        )
        burn_usd_price = get_usd_price_zero_if_error(
            asset=vault.token,
            time=timestamp,
            location='yearn vault withdraw',
            msg_aggregator=self.msg_aggregator,
        )
        events.append(
            YearnVaultEvent(
                event_type='withdraw',
                block_number=withdraw_event['blockNumber'],
                timestamp=timestamp,
                from_asset=vault.token,
                from_value=Balance(
                    amount=burn_amount,
                    usd_value=burn_amount * burn_usd_price,
                ),
                to_asset=vault.underlying_token,
                to_value=Balance(
                    amount=withdraw_amount,
                    usd_value=withdraw_amount * withdraw_usd_price,
                ),
                realized_pnl=None,
                tx_hash=tx_hash,
                log_index=withdraw_index,
            ))

    return events
def _historical_dsr_for_account(
        self,
        account: ChecksumEthAddress,
        proxy: ChecksumEthAddress,
) -> DSRAccountReport:
    """Creates a historical DSR report for a single account

    Queries all pot.join and pot.exit events of the account's proxy, pairs
    each with the corresponding vat.move DAI value, and then walks the
    movements chronologically to compute the DSR gain accrued so far at each
    point and in total.

    May raise:
    - RemoteError if etherscan is used and there is a problem with reaching it
    or with the returned result.
    - BlockchainQueryError if an ethereum node is used and the contract call
    queries fail for some reason
    """
    movements = []
    # NOTE(review): these two lists are populated but never read inside this
    # method -- possibly kept for debugging; confirm before removing
    join_normalized_balances = []
    exit_normalized_balances = []
    argument_filters = {
        'sig': '0x049878f3',  # join
        'usr': proxy,
    }
    join_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_POT.address,
        abi=MAKERDAO_POT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_POT.deployed_block,
    )
    for join_event in join_events:
        try:
            # presumably topics[2] is the pot-normalized 'wad' amount;
            # matches the exit handling below
            wad_val = hexstr_to_int(join_event['topics'][2])
        except DeserializationError as e:
            msg = f'Error at reading DSR join event topics. {str(e)}. Skipping event...'
            self.msg_aggregator.add_error(msg)
            continue
        join_normalized_balances.append(wad_val)

        # and now get the deposit amount
        block_number = join_event['blockNumber']
        dai_value = self._get_vat_join_exit_at_transaction(
            movement_type='join',
            proxy_address=proxy,
            block_number=block_number,
            transaction_index=join_event['transactionIndex'],
        )
        if dai_value is None:
            self.msg_aggregator.add_error(
                'Did not find corresponding vat.move event for pot join. Skipping ...',
            )
            continue

        timestamp = self.ethereum.get_event_timestamp(join_event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='DSR deposit',
        )
        movements.append(
            DSRMovement(
                movement_type='deposit',
                address=account,
                normalized_balance=wad_val,
                amount=dai_value,
                amount_usd_value=_dsrdai_to_dai(dai_value) * usd_price,
                block_number=join_event['blockNumber'],
                timestamp=timestamp,
                tx_hash=join_event['transactionHash'],
            ),
        )

    argument_filters = {
        'sig': '0x7f8661a1',  # exit
        'usr': proxy,
    }
    exit_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_POT.address,
        abi=MAKERDAO_POT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_POT.deployed_block,
    )
    for exit_event in exit_events:
        try:
            wad_val = hexstr_to_int(exit_event['topics'][2])
        except DeserializationError as e:
            msg = f'Error at reading DSR exit event topics. {str(e)}. Skipping event...'
            self.msg_aggregator.add_error(msg)
            continue
        exit_normalized_balances.append(wad_val)

        block_number = exit_event['blockNumber']
        # and now get the withdrawal amount
        dai_value = self._get_vat_join_exit_at_transaction(
            movement_type='exit',
            proxy_address=proxy,
            block_number=block_number,
            transaction_index=exit_event['transactionIndex'],
        )
        if dai_value is None:
            self.msg_aggregator.add_error(
                'Did not find corresponding vat.move event for pot exit. Skipping ...',
            )
            continue

        timestamp = self.ethereum.get_event_timestamp(exit_event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='DSR withdrawal',
        )
        movements.append(
            DSRMovement(
                movement_type='withdrawal',
                address=account,
                normalized_balance=wad_val,
                amount=dai_value,
                amount_usd_value=_dsrdai_to_dai(dai_value) * usd_price,
                block_number=exit_event['blockNumber'],
                timestamp=timestamp,
                tx_hash=exit_event['transactionHash'],
            ),
        )

    # Walk movements in block order, tracking the running normalized balance
    # and the running DAI amount in DSR to compute gain-so-far at each step
    normalized_balance = 0
    amount_in_dsr = 0
    movements.sort(key=lambda x: x.block_number)

    for idx, m in enumerate(movements):
        if m.normalized_balance == 0:
            # skip 0 amount/balance movements. Consider last gain as last gain so far.
            if idx == 0:
                m.gain_so_far = 0
                m.gain_so_far_usd_value = ZERO
            else:
                m.gain_so_far = movements[idx - 1].gain_so_far
                m.gain_so_far_usd_value = movements[idx - 1].gain_so_far_usd_value
            continue

        if normalized_balance == m.normalized_balance:
            # full withdrawal/deposit of the tracked balance: the gain is
            # directly the difference from what went in
            m.gain_so_far = m.amount - amount_in_dsr
        else:
            # derive the chi (accumulated rate) at this movement from its own
            # amount/normalized-balance ratio and apply it to the running balance
            current_chi = FVal(m.amount) / FVal(m.normalized_balance)
            gain_so_far = normalized_balance * current_chi - amount_in_dsr
            m.gain_so_far = gain_so_far.to_int(exact=False)

        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=m.timestamp,
            default_value=FVal(1),
            location='DSR movement',
        )
        m.gain_so_far_usd_value = _dsrdai_to_dai(m.gain_so_far) * usd_price
        if m.movement_type == 'deposit':
            normalized_balance += m.normalized_balance
            amount_in_dsr += m.amount
        else:  # withdrawal
            amount_in_dsr -= m.amount
            normalized_balance -= m.normalized_balance

    # Current total: normalized balance times the pot's current chi
    chi = MAKERDAO_POT.call(self.ethereum, 'chi')
    normalized_balance = normalized_balance * chi
    gain = normalized_balance - amount_in_dsr
    try:
        current_dai_price = Inquirer().find_usd_price(A_DAI)
    except RemoteError:
        current_dai_price = Price(FVal(1))

    # Calculate the total gain so far in USD
    unaccounted_gain = _dsrdai_to_dai(gain)
    last_usd_value = ZERO
    last_dai_gain = 0
    if len(movements) != 0:
        last_usd_value = movements[-1].gain_so_far_usd_value
        last_dai_gain = movements[-1].gain_so_far
        unaccounted_gain = _dsrdai_to_dai(gain - last_dai_gain)
    gain_so_far_usd_value = unaccounted_gain * current_dai_price + last_usd_value

    return DSRAccountReport(
        movements=movements,
        gain_so_far=gain,
        gain_so_far_usd_value=gain_so_far_usd_value,
    )
def _try_get_chi_close_to(self, time: Timestamp) -> FVal:
    """Best effort attempt to get a chi value close to the given timestamp

    It can't be 100% accurate since we use the logs of join() or exit()
    in order to find the closest time chi was changed. It also may not work
    if for some reason there is no logs in the block range we are looking for.

    Better solution would have been an archive node's query.

    May raise:
    - RemoteError if there are problems with querying etherscan
    - ChiRetrievalError if we are unable to query chi at the given timestamp
    - BlockchainQueryError if an ethereum node is used and the contract call
    queries fail for some reason
    """
    if time > 1584386100:
        # If the time is after 16/03/2020 19:15 GMT we know that
        # makerdao DSR was set to 0% we know chi has not changed
        # https://twitter.com/MakerDAO/status/1239270910810411008
        return FVal('1018008449363110619399951035')

    block_number = self.ethereum.etherscan.get_blocknumber_by_time(time)
    latest_block = self.ethereum.get_latest_block_number()
    blocks_queried = 0
    counter = 1
    # Keep trying to find events that could reveal the chi to us. Go back
    # as far as MAX_BLOCKS_TO_QUERY and only then give up.
    # Each iteration widens the search window by CHI_BLOCKS_SEARCH_DISTANCE
    # both backwards and forwards from block_number.
    while blocks_queried < MAX_BLOCKS_TO_QUERY:
        back_from_block = max(
            MAKERDAO_POT.deployed_block,
            block_number - counter * CHI_BLOCKS_SEARCH_DISTANCE,
        )
        back_to_block = block_number - (counter - 1) * CHI_BLOCKS_SEARCH_DISTANCE
        forward_from_block = min(
            latest_block,
            block_number + (counter - 1) * CHI_BLOCKS_SEARCH_DISTANCE,
        )
        forward_to_block = min(
            latest_block,
            block_number + CHI_BLOCKS_SEARCH_DISTANCE,
        )
        back_joins, back_exits = self._get_join_exit_events(back_from_block, back_to_block)
        forward_joins, forward_exits = self._get_join_exit_events(
            from_block=forward_from_block,
            to_block=forward_to_block,
        )

        no_results = all(
            len(x) == 0 for x in (back_joins, back_exits, forward_joins, forward_exits)
        )
        if latest_block == forward_to_block and no_results:
            # if our forward querying got us to the latest block and there is
            # still no other results, then take current chi
            return self.ethereum.call_contract(
                contract_address=MAKERDAO_POT.address,
                abi=MAKERDAO_POT.abi,
                method_name='chi',
            )

        if not no_results:
            # got results!
            break

        blocks_queried += 2 * CHI_BLOCKS_SEARCH_DISTANCE
        counter += 1

    # no_results (and the event lists) survive from the last loop iteration
    if no_results:
        raise ChiRetrievalError(
            f'Found no DSR events around timestamp {time}. Cant query chi.',
        )

    # Find the closest event to the to_block number, looking both at events
    # in the blocks before and in the blocks after block_number
    found_event = None
    back_event = _find_closest_event(back_joins, back_exits, -1, operator.gt)
    forward_event = _find_closest_event(forward_joins, forward_exits, 0, operator.lt)

    if back_event and not forward_event:
        found_event = back_event
    elif forward_event and not back_event:
        found_event = forward_event
    else:
        # We have both backward and forward events, get the one closer to block number
        back_block_number = back_event['blockNumber']  # type: ignore
        forward_block_number = forward_event['blockNumber']  # type: ignore
        if block_number - back_block_number <= forward_block_number - block_number:
            found_event = back_event
        else:
            found_event = forward_event

    assert found_event, 'at this point found_event should be populated'  # helps mypy
    event_block_number = found_event['blockNumber']
    first_topic = found_event['topics'][0]

    # 0x049878f3 is the pot join() sig (see _historical_dsr_for_account);
    # anything else here is an exit
    amount = self._get_vat_join_exit_at_transaction(
        movement_type='join' if first_topic.startswith('0x049878f3') else 'exit',
        proxy_address=hex_or_bytes_to_address(found_event['topics'][1]),
        block_number=event_block_number,
        transaction_index=found_event['transactionIndex'],
    )
    if amount is None:
        raise ChiRetrievalError(
            f'Found no VAT.move events around timestamp {time}. Cant query chi.',
        )

    # chi is the ratio of the moved DAI value to the normalized (wad) balance
    wad_val = hexstr_to_int(found_event['topics'][2])
    chi = FVal(amount) / FVal(wad_val)
    return chi
def get_events_for_atoken_and_address(
        self,
        user_address: ChecksumEthAddress,
        atoken: EthereumToken,
        deposit_events: List[Dict[str, Any]],
        withdraw_events: List[Dict[str, Any]],
        from_block: int,
        to_block: int,
) -> List[AaveEvent]:
    """Turn raw aave deposit/withdraw logs into AaveEvents for one atoken.

    Also queries the atoken's mint transfers (Transfer from the zero address)
    in the block range: mints matched to a deposit are dropped, and any
    remaining mints are recorded as interest events.

    This function should be entered while holding the history_lock semaphore
    """
    argument_filters = {
        'from': ZERO_ADDRESS,
        'to': user_address,
    }
    mint_events = self.ethereum.get_logs(
        contract_address=atoken.ethereum_address,
        abi=ATOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=to_block,
    )
    # mint_data entries are (block_number, amount, timestamp, tx_hash) tuples
    mint_data = set()
    mint_data_to_log_index = {}
    for event in mint_events:
        amount = hexstr_to_int(event['data'])
        if amount == 0:
            continue  # first mint can be for 0. Ignore
        entry = (
            event['blockNumber'],
            amount,
            self.ethereum.get_event_timestamp(event),
            event['transactionHash'],
        )
        mint_data.add(entry)
        mint_data_to_log_index[entry] = event['logIndex']

    reserve_asset = ATOKENV1_TO_ASSET[atoken]  # should never raise KeyError
    reserve_address, decimals = _get_reserve_address_decimals(reserve_asset)
    aave_events: List[AaveEvent] = []
    for event in deposit_events:
        # only process deposits of this atoken's reserve asset
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            deposit = hexstr_to_int(event['data'][:66])
            block_number = event['blockNumber']
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            log_index = event['logIndex']
            # If there is a corresponding deposit event remove the minting event data
            entry = (block_number, deposit, timestamp, tx_hash)
            if entry in mint_data:
                mint_data.remove(entry)
                del mint_data_to_log_index[entry]

            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave deposit',
                msg_aggregator=self.msg_aggregator,
            )
            deposit_amount = deposit / (FVal(10)**FVal(decimals))
            aave_events.append(
                AaveDepositWithdrawalEvent(
                    event_type='deposit',
                    asset=reserve_asset,
                    atoken=atoken,
                    value=Balance(
                        amount=deposit_amount,
                        usd_value=deposit_amount * usd_price,
                    ),
                    block_number=block_number,
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                    log_index=log_index,
                ))

    # Mints that matched no deposit are interest earned on the atoken
    for data in mint_data:
        usd_price = query_usd_price_zero_if_error(
            asset=atoken,
            time=data[2],
            location='aave interest profit',
            msg_aggregator=self.msg_aggregator,
        )
        interest_amount = data[1] / (FVal(10)**FVal(decimals))
        aave_events.append(
            AaveInterestEvent(
                event_type='interest',
                asset=atoken,
                value=Balance(
                    amount=interest_amount,
                    usd_value=interest_amount * usd_price,
                ),
                block_number=data[0],
                timestamp=data[2],
                tx_hash=data[3],
                log_index=mint_data_to_log_index[data],
            ))

    for event in withdraw_events:
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            withdrawal = hexstr_to_int(event['data'][:66])
            block_number = event['blockNumber']
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave withdrawal',
                msg_aggregator=self.msg_aggregator,
            )
            withdrawal_amount = withdrawal / (FVal(10)**FVal(decimals))
            aave_events.append(
                AaveDepositWithdrawalEvent(
                    event_type='withdrawal',
                    asset=reserve_asset,
                    atoken=atoken,
                    value=Balance(
                        amount=withdrawal_amount,
                        usd_value=withdrawal_amount * usd_price,
                    ),
                    block_number=block_number,
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                    log_index=event['logIndex'],
                ))

    return aave_events
def add_receipt_data(self, data: Dict[str, Any]) -> None:
    """Persist a chain-returned transaction receipt into the DB.

    The transaction itself must already exist in the DB.

    May raise:
    - Key Error if any of the expected fields are missing
    - DeserializationError if there is a problem deserializing a value
    - sqlcipher.DatabaseError if the transaction hash is not in the DB or if the receipt
    already exists in the DB. TODO: Differentiate?
    """
    tx_hash_bytes = hexstring_to_bytes(data['transactionHash'])
    # some nodes omit 'type' for pre-EIP1559 transactions -- treat as legacy (0)
    tx_type = hexstr_to_int(data.get('type', '0x0'))
    # 'status' can be absent or explicitly null on older txs -- default to 1
    status = data.get('status', 1)
    if status is None:
        status = 1
    raw_contract_address = data['contractAddress']
    contract_address = deserialize_ethereum_address(raw_contract_address) if raw_contract_address else None  # noqa: E501

    # Collect the per-log and per-topic rows before touching the DB
    logs_to_insert = []
    topics_to_insert = []
    for receipt_log in data['logs']:
        current_log_index = receipt_log['logIndex']
        logs_to_insert.append((
            tx_hash_bytes,
            current_log_index,
            hexstring_to_bytes(receipt_log['data']),
            deserialize_ethereum_address(receipt_log['address']),
            int(receipt_log['removed']),
        ))
        topics_to_insert.extend(
            (tx_hash_bytes, current_log_index, hexstring_to_bytes(topic), topic_pos)
            for topic_pos, topic in enumerate(receipt_log['topics'])
        )

    cursor = self.db.conn.cursor()
    cursor.execute(
        'INSERT INTO ethtx_receipts (tx_hash, contract_address, status, type) '
        'VALUES(?, ?, ?, ?) ',
        (tx_hash_bytes, contract_address, status, tx_type),
    )
    if logs_to_insert:
        cursor.executemany(
            'INSERT INTO ethtx_receipt_logs (tx_hash, log_index, data, address, removed) '
            'VALUES(? ,? ,? ,? ,?)',
            logs_to_insert,
        )
    if topics_to_insert:
        cursor.executemany(
            'INSERT INTO ethtx_receipt_log_topics (tx_hash, log_index, topic, topic_index) '  # noqa: E501
            'VALUES(? ,? ,?, ?)',
            topics_to_insert,
        )
    self.db.conn.commit()
    self.db.update_last_write()
def classify_tx(account: Account, tx_hash: str, txn: EthereumTransaction, receipt: dict) \
        -> List[LedgerAction]:
    """Turn a transaction receipt's logs into ledger actions for known protocols.

    Walks every log of the receipt and matches (topic0, emitting address) pairs
    against hard-coded airdrop/reward contracts, producing an airdrop, income or
    expense LedgerAction per match. Only transactions sent from the tracked
    account are considered. Unknown events for a known topic are logged as
    warnings. Returns the (possibly empty) list of classified actions.

    NOTE(review): the byte offsets into event['data'] below (e.g. [130:],
    [2:66], [2:][128:192]) encode each contract's specific ABI layout —
    presumably picking the amount word out of the data blob; verify against
    each contract's event signature before changing any of them.
    """
    actions = []  # type: List[LedgerAction]
    tx_time = serialize_timestamp(txn.timestamp)  # only used in warning messages
    # Skip transactions not initiated by the tracked account
    if txn.from_address != account.address:
        return actions
    for event in receipt['logs']:
        # --- CLAIMED topic: merkle-distributor style airdrop claims ---
        if event['topics'][0] == CLAIMED and event[
                'address'] == ADDR_UNISWAP_AIRDROP.lower():
            # UNI airdrop: amount is the third 32-byte word of the data
            amount = hexstr_to_int(event['data'][130:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.AIRDROP,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x1f9840a85d5af5bf1d1762f925bdaddc4201f984'),
                    notes='',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED and event[
                'address'] == ADDR_MIRROR_AIRDROP.lower():
            # MIR airdrop
            amount = hexstr_to_int(event['data'][130:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.AIRDROP,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x09a3ecafa817268f77be1283176b946c4ff2e608'),
                    notes='',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED and event[
                'address'] == ADDR_POOL_AIRDROP.lower():
            # POOL airdrop
            amount = hexstr_to_int(event['data'][130:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.AIRDROP,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x0cec1a9154ff802e7934fc916ed7ca50bde6844e'),
                    notes='PoolTogether airdrop',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED and event[
                'address'] == ADDR_IMX_AIRDROP.lower():
            # IMX airdrop
            amount = hexstr_to_int(event['data'][130:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.AIRDROP,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x7b35ce522cb72e4077baeb96cb923a5529764a00'),
                    notes='IMX airdrop',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED:
            logger.warning('Unknown Claimed event for tx %s at %s', tx_hash,
                           tx_time)

        # --- CLAIMED_3 topic: Badger rewards tree ---
        if event['topics'][0] == CLAIMED_3 and event[
                'address'] == ADDR_BADGER_TREE.lower():
            # Only count claims whose claimed token (topic 2) is BADGER itself
            if hexstr_to_int(event['topics'][2]) == hexstr_to_int(ADDR_BADGER):
                amount = hexstr_to_int(event['data'][2:66])
                token = symbol_to_asset_or_token(
                    '_ceth_0x3472a5a71965499acd81997a54bba8d852c6e53d')
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.INCOME,
                                 amount=FVal(amount) / FVal(1e18),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=token,
                                 notes='Badger rewards for staking',
                                 link=tx_hash)
                ]
        elif event['topics'][0] == CLAIMED_3:
            logger.warning('Unknown Claimed event for tx %s at %s', tx_hash,
                           tx_time)

        # --- CLAIMED_2 topic: assorted claim contracts ---
        if event['topics'][0] == CLAIMED_2 and event[
                'address'] == ADDR_XTOKEN_AIRDROP.lower():
            # xToken airdrop: amount is the whole data blob
            amount = hexstr_to_int(event['data'])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.AIRDROP,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x7f3edcdd180dbe4819bd98fee8929b5cedb3adeb'),
                    notes='xToken airdrop',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED_2 and event[
                'address'] == ADDR_BALANCER_REWARDS.lower():
            # BAL liquidity-mining rewards: amount is the second data word
            amount = hexstr_to_int(event['data'][66:])
            actions += [
                LedgerAction(identifier=None,
                             location='',
                             action_type=LedgerActionType.INCOME,
                             amount=FVal(amount) / FVal(1e18),
                             rate=None,
                             rate_asset=None,
                             timestamp=txn.timestamp,
                             asset=symbol_to_asset_or_token('BAL'),
                             notes='Balancer rewards for providing liquidity',
                             link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED_2 and event[
                'address'] == ADDR_ROOK_REWARDS.lower():
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(identifier=None,
                             location='',
                             action_type=LedgerActionType.INCOME,
                             amount=FVal(amount) / FVal(1e18),
                             rate=None,
                             rate_asset=None,
                             timestamp=txn.timestamp,
                             asset=symbol_to_asset_or_token('ROOK'),
                             notes='Rook rewards for providing liquidity',
                             link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED_2:
            logger.warning('Unknown Claimed event for tx: %s', tx_hash)

        # --- CLAIMED_4 topic: Gitcoin retroactive airdrop ---
        if event['topics'][0] == CLAIMED_4 and event[
                'address'] == ADDR_GITCOIN_AIRDROP.lower():
            amount = hexstr_to_int(event['data'][2:][128:192])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.INCOME,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0xDe30da39c46104798bB5aA3fe8B9e0e1F348163F'),
                    notes='Gitcoin retroactive airdrop',
                    link=tx_hash)
            ]
        elif event['topics'][0] == CLAIMED_4:
            logger.warning('Unknown Claimed event for tx: %s', tx_hash)

        # --- REWARD_PAID topic: staking/LP incentive payouts ---
        if event['topics'][0] == REWARD_PAID and event[
                'address'] in ADDR_PIEDAO_INCENTIVES:
            # PieDAO pays DOUGH from any of several incentive contracts
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(identifier=None,
                             location='',
                             action_type=LedgerActionType.INCOME,
                             amount=FVal(amount) / FVal(1e18),
                             rate=None,
                             rate_asset=None,
                             timestamp=txn.timestamp,
                             asset=symbol_to_asset_or_token('DOUGH'),
                             notes='rewards for providing liquidity',
                             link=tx_hash)
            ]
        elif event['topics'][0] == REWARD_PAID and event[
                'address'] == ADDR_INDEX_REWARDS.lower():
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(identifier=None,
                             location='',
                             action_type=LedgerActionType.INCOME,
                             amount=FVal(amount) / FVal(1e18),
                             rate=None,
                             rate_asset=None,
                             timestamp=txn.timestamp,
                             asset=symbol_to_asset_or_token('INDEX'),
                             notes='rewards for providing liquidity',
                             link=tx_hash)
            ]
        elif event['topics'][0] == REWARD_PAID and event[
                'address'] == ADDR_YFI_GOVERNANCE.lower():
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.INCOME,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token('yDAI+yUSDC+yUSDT+yTUSD'),
                    notes='rewards from yearn governance',
                    link=tx_hash)
            ]
        elif event['topics'][0] == REWARD_PAID and event[
                'address'] in ADDR_CREAM_REWARDS:
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(identifier=None,
                             location='',
                             action_type=LedgerActionType.INCOME,
                             amount=FVal(amount) / FVal(1e18),
                             rate=None,
                             rate_asset=None,
                             timestamp=txn.timestamp,
                             asset=symbol_to_asset_or_token('CREAM'),
                             notes='rewards from cream incentives',
                             link=tx_hash)
            ]
        elif event['topics'][0] == REWARD_PAID and event[
                'address'] == ADDR_MIR_REWARDS.lower():
            amount = hexstr_to_int(event['data'][2:])
            actions += [
                LedgerAction(
                    identifier=None,
                    location='',
                    action_type=LedgerActionType.INCOME,
                    amount=FVal(amount) / FVal(1e18),
                    rate=None,
                    rate_asset=None,
                    timestamp=txn.timestamp,
                    asset=symbol_to_asset_or_token(
                        '_ceth_0x09a3ecafa817268f77be1283176b946c4ff2e608'),
                    notes='rewards for staking MIR LP',
                    link=tx_hash)
            ]
        elif event['topics'][0] == REWARD_PAID:
            logger.warning('Unknown RewardPaid event for tx %s at %s', tx_hash,
                           tx_time)

        # --- MINTED topic: Swerve gauge minting ---
        if event['topics'][0] == MINTED and event[
                'address'] == ADDR_SWERVE_MINTER.lower():
            # topic 1 is the recipient; only count mints to the tracked account
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(
                    account.address):
                amount = hexstr_to_int(event['data'][66:])
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.INCOME,
                                 amount=FVal(amount) / FVal(1e18),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=symbol_to_asset_or_token('SWRV'),
                                 notes='Swerve rewards for pooling liquidity',
                                 link=tx_hash)
                ]
        elif event['topics'][0] == MINTED:
            logger.warning('Unknown Minted event for tx %s at %s', tx_hash,
                           tx_time)

        # --- PURCHASE topic: Fei genesis commitment (an expense, not income) ---
        if event['topics'][0] == PURCHASE and event[
                'address'] == ADDR_FEI_GENESIS_GROUP.lower():
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(
                    account.address):
                amount = hexstr_to_int(event['data'])
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.EXPENSE,
                                 amount=FVal(amount) / FVal(1e18),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=symbol_to_asset_or_token('ETH'),
                                 notes='Fei Genesis Commit',
                                 link=tx_hash)
                ]
        elif event['topics'][0] == PURCHASE:
            logger.warning('Unknown Purchase event for tx %s at %s', tx_hash,
                           tx_time)

        # --- TRANSFER topic: token transfers from known reward distributors ---
        if event['topics'][0] == TRANSFER and event[
                'address'] == ADDR_DODO.lower():
            # Only DODO transfers from the rewards contract to the tracked account
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(ADDR_DODO_REWARDS) and \
                    hexstr_to_int(event['topics'][2]) == hexstr_to_int(account.address):
                amount = hexstr_to_int(event['data'])
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.INCOME,
                                 amount=FVal(amount) / FVal(1e18),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=symbol_to_asset_or_token('DODO'),
                                 notes='Claim DODO rewards',
                                 link=tx_hash)
                ]
        elif event['topics'][0] == TRANSFER and event[
                'address'] == ADDR_SUSHI.lower():
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(ADDR_SUSHI_REWARDS) and \
                    hexstr_to_int(event['topics'][2]) == hexstr_to_int(account.address):
                amount = hexstr_to_int(event['data'])
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.INCOME,
                                 amount=FVal(amount) / FVal(1e18),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=symbol_to_asset_or_token('SUSHI'),
                                 notes='Claim SUSHI rewards for staking LP',
                                 link=tx_hash)
                ]
        elif event['topics'][0] == TRANSFER and event[
                'address'] == ADDR_TORN.lower():
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(ADDR_TORN_VTORN) and \
                    hexstr_to_int(event['topics'][2]) == hexstr_to_int(account.address):
                amount = hexstr_to_int(event['data'])
                actions += [
                    LedgerAction(
                        identifier=None,
                        location='',
                        action_type=LedgerActionType.AIRDROP,
                        amount=FVal(amount) / FVal(1e18),
                        rate=None,
                        rate_asset=None,
                        timestamp=txn.timestamp,
                        asset=symbol_to_asset_or_token(
                            '_ceth_0x77777FeDdddFfC19Ff86DB637967013e6C6A116C'
                        ),
                        notes='TORN airdrop',
                        link=tx_hash)
                ]
        elif event['topics'][0] == STAKEEND and event[
                'address'] == ADDR_HEX.lower():
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(
                    account.address):
                # NOTE(review): takes the first 18 hex chars (72 bits) of the data
                # as the payout — presumably HEX's packed StakeEnd layout; confirm
                # against the HEX contract ABI. HEX has 8 decimals, hence 1e8.
                payout = hexstr_to_int(event['data'][2:][:18])
                actions += [
                    LedgerAction(identifier=None,
                                 location='',
                                 action_type=LedgerActionType.INCOME,
                                 amount=FVal(payout) / FVal(1e8),
                                 rate=None,
                                 rate_asset=None,
                                 timestamp=txn.timestamp,
                                 asset=symbol_to_asset_or_token('_ceth_' +
                                                                ADDR_HEX),
                                 notes='HEX Payout for staking',
                                 link=tx_hash)
                ]

        # --- HUNT topic: Blackpool airdrop ---
        if event['topics'][0] == HUNT and event[
                'address'] == ADDR_BLACKPOOL_AIRDROP.lower():
            if hexstr_to_int(event['topics'][1]) == hexstr_to_int(
                    account.address):
                amount = hexstr_to_int(event['data'][2:][64:128])
                actions += [
                    LedgerAction(
                        identifier=None,
                        location='',
                        action_type=LedgerActionType.AIRDROP,
                        amount=FVal(amount) / FVal(1e18),
                        rate=None,
                        rate_asset=None,
                        timestamp=txn.timestamp,
                        asset=symbol_to_asset_or_token(
                            '_ceth_0x0eC9F76202a7061eB9b3a7D6B59D36215A7e37da'
                        ),
                        notes='Blackpool airdrop',
                        link=tx_hash)
                ]
    return actions