def _get_balances_graph(
        self,
        addresses: List[ChecksumEthAddress],
) -> ProtocolBalance:
    """Get the addresses' pools data querying the Uniswap subgraph

    Each liquidity position is converted into a <LiquidityPool>.

    Pages through the subgraph results GRAPH_QUERY_LIMIT entries at a time
    and aggregates, per user address, one <LiquidityPool> per position.
    Tokens met along the way are classified into known/unknown sets which
    are returned alongside the balances in the <ProtocolBalance>.
    """
    address_balances: DDAddressBalances = defaultdict(list)
    known_assets: Set[EthereumToken] = set()
    unknown_assets: Set[UnknownEthereumToken] = set()

    # The subgraph stores addresses lowercased
    addresses_lower = [address.lower() for address in addresses]
    querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$addresses': '[String!]',
        '$balance': 'BigDecimal!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'addresses': addresses_lower,
        'balance': '0',
    }
    while True:  # paginate until a short page is returned
        result = self.graph.query(  # type: ignore # caller already checks
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result['liquidityPositions']

        for lp in result_data:
            user_address = to_checksum_address(lp['user']['id'])
            user_lp_balance = FVal(lp['liquidityTokenBalance'])
            lp_pair = lp['pair']
            lp_address = to_checksum_address(lp_pair['id'])
            lp_total_supply = FVal(lp_pair['totalSupply'])

            # Insert LP tokens reserves within tokens dicts
            token0 = lp_pair['token0']
            token0['total_amount'] = lp_pair['reserve0']
            token1 = lp_pair['token1']
            token1['total_amount'] = lp_pair['reserve1']

            liquidity_pool_assets = []

            for token in token0, token1:
                # Get the token <EthereumToken> or <UnknownEthereumToken>
                asset = get_ethereum_token(
                    symbol=token['symbol'],
                    ethereum_address=to_checksum_address(token['id']),
                    name=token['name'],
                    decimals=int(token['decimals']),
                )

                # Classify the asset either as known or unknown
                if isinstance(asset, EthereumToken):
                    known_assets.add(asset)
                elif isinstance(asset, UnknownEthereumToken):
                    unknown_assets.add(asset)

                # Estimate the underlying asset total_amount: the user's share
                # of the pool reserves is proportional to their LP token share
                asset_total_amount = FVal(token['total_amount'])
                user_asset_balance = (
                    user_lp_balance / lp_total_supply * asset_total_amount
                )

                liquidity_pool_asset = LiquidityPoolAsset(
                    asset=asset,
                    total_amount=asset_total_amount,
                    user_balance=Balance(amount=user_asset_balance),
                )
                liquidity_pool_assets.append(liquidity_pool_asset)

            liquidity_pool = LiquidityPool(
                address=lp_address,
                assets=liquidity_pool_assets,
                total_supply=lp_total_supply,
                user_balance=Balance(amount=user_lp_balance),
            )
            address_balances[user_address].append(liquidity_pool)

        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break

        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }

    protocol_balance = ProtocolBalance(
        address_balances=dict(address_balances),
        known_assets=known_assets,
        unknown_assets=unknown_assets,
    )
    return protocol_balance
def test_add_and_get_yearn_vault_events(data_dir, username):
    """Test that get yearn vault events works fine and returns only events
    for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    def _event_pair(deposit_hash, withdraw_hash):
        """Build a deposit followed by a withdraw event with the given tx hashes"""
        return [
            YearnVaultEvent(
                event_type='deposit',
                from_asset=A_DAI,
                from_value=Balance(amount=ONE, usd_value=ONE),
                to_asset=A_YV1_DAI,
                to_value=Balance(amount=ONE, usd_value=ONE),
                realized_pnl=None,
                block_number=1,
                timestamp=Timestamp(1),
                tx_hash=deposit_hash,
                log_index=1,
                version=1,
            ),
            YearnVaultEvent(
                event_type='withdraw',
                from_asset=A_YV1_DAI,
                from_value=Balance(amount=ONE, usd_value=ONE),
                to_asset=A_DAI,
                to_value=Balance(amount=ONE, usd_value=ONE),
                realized_pnl=Balance(amount=FVal('0.01'), usd_value=FVal('0.01')),
                block_number=2,
                timestamp=Timestamp(2),
                tx_hash=withdraw_hash,
                log_index=2,
                version=1,
            ),
        ]

    addr1 = make_ethereum_address()
    addr1_events = _event_pair(
        deposit_hash='0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
        withdraw_hash='0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
    )
    data.db.add_yearn_vaults_events(address=addr1, events=addr1_events)

    addr2 = make_ethereum_address()
    addr2_events = _event_pair(
        deposit_hash='0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        withdraw_hash='0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
    )
    data.db.add_yearn_vaults_events(address=addr2, events=addr2_events)

    # Querying per address must return only that address' events
    for address, expected in ((addr1, addr1_events), (addr2, addr2_events)):
        events = data.db.get_yearn_vaults_events(address=address, vault=YEARN_VAULTS['yDAI'])
        assert events == expected
def _query_vault_details(
        self,
        vault: MakerDAOVault,
        proxy: ChecksumEthAddress,
        urn: ChecksumEthAddress,
) -> Optional[MakerDAOVaultDetails]:
    """Query all historical events of a MakerDAO vault and assemble its details.

    Gathers creation, collateral deposit/withdrawal, DAI generation/payback and
    liquidation events from on-chain logs, cross-checking collateral movements
    against the VAT frob events to filter out unrelated transfers.

    Returns None when the vault creation event cannot be found or the
    collateral type is unknown.

    They can raise:
    ConversionError due to hex_or_bytes_to_address, hexstr_to_int
    RemoteError due to external query errors
    """
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CDP_MANAGER.address,
        abi=MAKERDAO_CDP_MANAGER.abi,
        event_name='NewCdp',
        argument_filters={'cdp': vault.identifier},
        from_block=MAKERDAO_CDP_MANAGER.deployed_block,
    )
    if len(events) == 0:
        self.msg_aggregator.add_error(
            'No events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
        return None
    if len(events) != 1:
        log.error(
            f'Multiple events found for a Vault creation: {events}. Taking '
            f'only the first. This should not happen. Something is wrong',
        )
        self.msg_aggregator.add_error(
            'Multiple events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
    creation_ts = self.ethereum.get_event_timestamp(events[0])

    # get vat frob events for cross-checking
    argument_filters = {
        'sig': '0x76088703',  # frob
        'arg1': '0x' + vault.ilk.hex(),  # ilk
        'arg2': address_to_bytes32(urn),  # urn
        # arg3 can be urn for the 1st deposit, and proxy/owner for the next ones
        # so don't filter for it
        # 'arg3': address_to_bytes32(proxy),  # proxy - owner
    }
    frob_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    frob_event_tx_hashes = [x['transactionHash'] for x in frob_events]

    gemjoin = GEMJOIN_MAPPING.get(vault.collateral_type, None)
    if gemjoin is None:
        # Fix: the original message concatenated the two literals without a
        # separating space, producing "...{type}.Skipping ..."
        self.msg_aggregator.add_warning(
            f'Unknown makerdao vault collateral type detected {vault.collateral_type}. '
            'Skipping ...',
        )
        return None

    vault_events = []
    # Get the collateral deposit events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        # In cases where a CDP has been migrated from a SAI CDP to a DAI
        # Vault the usr in the first deposit will be the old address. To
        # detect the first deposit in these cases we need to check for
        # arg1 being the urn
        # 'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    # all subsequent deposits should have the proxy as a usr
    # but for non-migrated CDPS the previous query would also work
    # so in those cases we will have the first deposit 2 times
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
    }
    events.extend(
        self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        ))
    deposit_tx_hashes = set()
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash in deposit_tx_hashes:
            # Skip duplicate deposit that would be detected in non migrated CDP case
            continue
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue
        deposit_tx_hashes.add(tx_hash)
        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral deposit',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.DEPOSIT_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=tx_hash,
            ))

    # Get the collateral withdrawal events
    argument_filters = {
        'sig': '0xef693bed',  # exit
        'usr': proxy,
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue
        amount = asset_normalized_value(
            amount=hexstr_to_int(event['topics'][3]),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral withdrawal',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.WITHDRAW_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=tx_hash,  # consistency: use the already-extracted local
            ))

    total_dai_wei = 0
    # Get the dai generation events
    argument_filters = {
        'sig': '0xbb35783b',  # move
        'arg1': address_to_bytes32(urn),
        # For CDPs that were created by migrating from SAI the first DAI generation
        # during vault creation will have the old owner as arg2. So we can't
        # filter for it here. Still seems like the urn as arg1 is sufficient
        # 'arg2': address_to_bytes32(proxy),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    for event in events:
        # VAT amounts carry RAY (27) extra digits of precision; strip them
        given_amount = _shift_num_right_by(hexstr_to_int(event['topics'][3]), RAY_DIGITS)
        total_dai_wei += given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt generation',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.GENERATE_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the dai payback events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_DAI_JOIN.address,
        abi=MAKERDAO_DAI_JOIN.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_DAI_JOIN.deployed_block,
    )
    for event in events:
        given_amount = hexstr_to_int(event['topics'][3])
        total_dai_wei -= given_amount
        amount = token_normalized_value(
            token_amount=given_amount,
            token=A_DAI,
        )
        if amount == ZERO:
            # it seems there is a zero DAI value transfer from the urn when
            # withdrawing ETH. So we should ignore these as events
            continue
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt payback',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.PAYBACK_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the liquidation events
    argument_filters = {'urn': urn}
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CAT.address,
        abi=MAKERDAO_CAT.abi,
        event_name='Bite',
        argument_filters=argument_filters,
        from_block=MAKERDAO_CAT.deployed_block,
    )
    sum_liquidation_amount = ZERO
    sum_liquidation_usd = ZERO
    for event in events:
        # The liquidated lot is the first word of the event data; its size
        # depends on whether the node returned hex string or raw bytes
        if isinstance(event['data'], str):
            lot = event['data'][:66]
        else:  # bytes
            lot = event['data'][:32]
        amount = asset_normalized_value(
            amount=hexstr_to_int(lot),
            asset=vault.collateral_asset,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        sum_liquidation_amount += amount
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral liquidation',
        )
        amount_usd_value = amount * usd_price
        sum_liquidation_usd += amount_usd_value
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.LIQUIDATION,
                value=Balance(amount, amount_usd_value),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Interest owed is the debt in excess of net DAI moved through the vault
    total_interest_owed = vault.debt.amount - token_normalized_value(
        token_amount=total_dai_wei,
        token=A_DAI,
    )
    # sort vault events by timestamp
    vault_events.sort(key=lambda event: event.timestamp)

    return MakerDAOVaultDetails(
        identifier=vault.identifier,
        collateral_asset=vault.collateral_asset,
        total_interest_owed=total_interest_owed,
        creation_ts=creation_ts,
        total_liquidated=Balance(sum_liquidation_amount, sum_liquidation_usd),
        events=vault_events,
    )
def test_query_vaults_wbtc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with WBTC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x9684e6C1c7B79868839b27F88bA6d5A176367075',  # 8913
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    # That proxy has 3 vaults. We only want to test 8913, which is closed/repaid so just keep that
    vaults = [
        x for x in assert_proper_response_with_result(response)
        if x['identifier'] == 8913
    ]
    vault_8913 = MakerdaoVault(
        identifier=8913,
        owner=ethereum_accounts[0],
        collateral_type='WBTC-A',
        urn='0x37f7B3C82A9Edc13FdCcE66E7d500b3698A13294',
        collateral_asset=A_WBTC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.5),
        liquidation_price=None,
        stability_fee=FVal(0.02),
    )
    expected_vaults = [vault_8913.serialize()]
    # stability_fee can change on-chain over time, so it is not compared
    assert_serialized_lists_equal(expected_vaults, vaults, ignore_keys=['stability_fee'])
    # And also make sure that the internal mapping will only query details of 8913
    makerdao_vaults = rotki.chain_manager.get_module('makerdao_vaults')
    makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_8913]}

    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Expected serialized details for vault 8913 (only the first 4 events)
    vault_8913_details = {
        'identifier': 8913,
        'collateral_asset': 'WBTC',
        'creation_ts': 1588664698,
        'total_interest_owed': '0.1903819198',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '0.011',
                'usd_value': '87.06599',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25.15',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.15025',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '89.440517',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],  # way too many events in the vault, so no need to check them all
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 1
    assert_serialized_dicts_equal(
        details[0],
        vault_8913_details,
        # Checking only the first 4 events
        length_list_keymap={'events': 4},
    )
def test_query_vaults_usdc_strange(rotkehlchen_api_server, ethereum_accounts):
    """Strange case of a USDC vault that is not queried correctly

    https://oasis.app/borrow/7538?network=mainnet
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x15fEaFd4358b8C03c889D6661b0CA1Be3389792F',  # 7538
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    # That proxy has 3 vaults. We only want to test 7538, which is closed/repaid so just keep that
    vaults = [
        x for x in assert_proper_response_with_result(response)
        if x['identifier'] == 7538
    ]
    vault_7538 = MakerdaoVault(
        identifier=7538,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x70E58566C7baB6faaFE03fbA69DF45Ef4f48223B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.1),
        liquidation_price=None,
    )
    expected_vaults = [vault_7538.serialize()]
    assert_serialized_lists_equal(expected_vaults, vaults)
    # And also make sure that the internal mapping will only query details of 7538
    makerdao_vaults = rotki.chain_manager.get_module('makerdao_vaults')
    makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_7538]}

    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Expected serialized details of the closed/repaid vault 7538
    vault_7538_details = {
        'identifier': 7538,
        'collateral_asset': 'USDC',
        'creation_ts': 1585145754,
        'total_interest_owed': '0.0005943266',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '250.12',
                'usd_value': '250.12',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.000248996',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '0.0113',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7538_details]
    assert_serialized_lists_equal(expected_details, details)
}, 'user': { 'id': TEST_ADDRESS_2, }, } # Expected <LiquidityPool> for LIQUIDITY_POSITION_1 EXP_LIQUIDITY_POOL_1 = ( LiquidityPool( address=string_to_ethereum_address('0x260E069deAd76baAC587B5141bB606Ef8b9Bab6c'), assets=[ LiquidityPoolAsset( asset=A_SHL, total_amount=FVal('135433.787685858453561892'), user_balance=Balance( amount=FVal('2486.554982222884623101272349'), usd_value=ZERO, ), usd_price=Price(ZERO), ), LiquidityPoolAsset( asset=A_WETH, total_amount=FVal('72.576018267058292417'), user_balance=Balance( amount=FVal('1.332490679729371260856256139'), usd_value=ZERO, ), usd_price=Price(ZERO), ), ], total_supply=FVal('2885.30760350854829554'), user_balance=Balance(
def _get_vault_withdraw_events(
        self,
        vault: YearnVault,
        address: ChecksumEthAddress,
        from_block: int,
        to_block: int,
) -> List[YearnVaultEvent]:
    """Get all withdraw events of the underlying token to the vault

    Finds Transfer events of the underlying token from the vault contract to
    the user address, then inspects each transaction receipt for the matching
    vault-token burn (Transfer from the address to the zero address) to know
    how many vault tokens were redeemed. Events without a matching burn are
    reported to the user and skipped.
    """
    events: List[YearnVaultEvent] = []
    argument_filters = {'from': vault.contract.address, 'to': address}
    withdraw_events = self.ethereum.get_logs(
        contract_address=vault.underlying_token.ethereum_address,
        abi=ERC20TOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=to_block,
    )
    for withdraw_event in withdraw_events:
        timestamp = self.ethereum.get_event_timestamp(withdraw_event)
        # NOTE(review): the transferred token is the underlying token, but the
        # amount is normalized with vault.token decimals — presumably they
        # match for these vaults; confirm
        withdraw_amount = token_normalized_value(
            token_amount=hex_or_bytes_to_int(withdraw_event['data']),
            token=vault.token,
        )
        tx_hash = withdraw_event['transactionHash']
        tx_receipt = self.ethereum.get_transaction_receipt(tx_hash)
        withdraw_index = deserialize_int_from_hex_or_int(
            withdraw_event['logIndex'],
            'yearn withdraw log index',
        )
        burn_amount = None
        for log in tx_receipt['logs']:
            # topic[0] is the ERC20 Transfer event signature; a transfer from
            # the user address to the zero address is the vault token burn
            found_event = (
                log['topics'][0] == '0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef' and  # noqa: E501
                log['topics'][1] == address_to_bytes32(address) and
                log['topics'][2] == address_to_bytes32(ZERO_ADDRESS))
            if found_event:
                # found the burn log
                burn_amount = token_normalized_value(
                    token_amount=hex_or_bytes_to_int(log['data']),
                    token=vault.token,
                )

        if burn_amount is None:
            self.msg_aggregator.add_error(
                f'Ignoring yearn withdraw event with tx_hash {tx_hash} and log index '
                f'{withdraw_index} due to inability to find corresponding burn event',
            )
            continue

        withdraw_usd_price = get_usd_price_zero_if_error(
            asset=vault.underlying_token,
            time=timestamp,
            location='yearn vault withdraw',
            msg_aggregator=self.msg_aggregator,
        )
        burn_usd_price = get_usd_price_zero_if_error(
            asset=vault.token,
            time=timestamp,
            location='yearn vault withdraw',
            msg_aggregator=self.msg_aggregator,
        )
        events.append(
            YearnVaultEvent(
                event_type='withdraw',
                block_number=deserialize_blocknumber(withdraw_event['blockNumber']),
                timestamp=timestamp,
                from_asset=vault.token,
                from_value=Balance(
                    amount=burn_amount,
                    usd_value=burn_amount * burn_usd_price,
                ),
                to_asset=vault.underlying_token,
                to_value=Balance(
                    amount=withdraw_amount,
                    usd_value=withdraw_amount * withdraw_usd_price,
                ),
                realized_pnl=None,
                tx_hash=tx_hash,
                log_index=withdraw_index,
            ))

    return events
def get_trove_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
    """Query the Liquity subgraph for trove change events per address.

    Proxied (DSProxy) addresses are also queried and their events are
    attributed back to the owning user address. Events outside the requested
    timestamp window are skipped. Remote query failures are logged and result
    in an empty mapping; individual undecodable changes are reported to the
    user and skipped.
    """
    addresses_to_query = list(addresses)
    proxied_addresses = self._get_accounts_having_proxy()
    # Reverse map so proxy-owned events can be attributed to the user address
    proxies_to_address = {v: k for k, v in proxied_addresses.items()}
    addresses_to_query += proxied_addresses.values()

    try:
        query = self._get_raw_history(addresses_to_query, 'trove')
    except RemoteError as e:
        log.error(f'Failed to query trove graph events for liquity. {str(e)}')
        query = {}

    result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
    for trove in query.get('troves', []):
        owner = to_checksum_address(trove['owner']['id'])
        if owner in proxies_to_address:
            owner = proxies_to_address[owner]
        for change in trove['changes']:
            try:
                timestamp = change['transaction']['timestamp']
                if timestamp < from_timestamp:
                    continue
                if timestamp > to_timestamp:
                    # assumes changes are time-ordered, so stop at the window end
                    break
                operation = TroveOperation.deserialize(change['troveOperation'])
                collateral_change = deserialize_optional_to_fval(
                    value=change['collateralChange'],
                    name='collateralChange',
                    location='liquity',
                )
                debt_change = deserialize_optional_to_fval(
                    value=change['debtChange'],
                    name='debtChange',
                    location='liquity',
                )
                # Historical prices used to value the deltas/after amounts
                lusd_price = PriceHistorian().query_historical_price(
                    from_asset=A_LUSD,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
                eth_price = PriceHistorian().query_historical_price(
                    from_asset=A_ETH,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
                debt_after_amount = deserialize_optional_to_fval(
                    value=change['debtAfter'],
                    name='debtAfter',
                    location='liquity',
                )
                collateral_after_amount = deserialize_optional_to_fval(
                    value=change['collateralAfter'],
                    name='collateralAfter',
                    location='liquity',
                )
                event = LiquityTroveEvent(
                    kind='trove',
                    tx=change['transaction']['id'],
                    address=owner,
                    timestamp=timestamp,
                    debt_after=AssetBalance(
                        asset=A_LUSD,
                        balance=Balance(
                            amount=debt_after_amount,
                            usd_value=lusd_price * debt_after_amount,
                        ),
                    ),
                    collateral_after=AssetBalance(
                        asset=A_ETH,
                        balance=Balance(
                            amount=collateral_after_amount,
                            usd_value=eth_price * collateral_after_amount,
                        ),
                    ),
                    debt_delta=AssetBalance(
                        asset=A_LUSD,
                        balance=Balance(
                            amount=debt_change,
                            usd_value=lusd_price * debt_change,
                        ),
                    ),
                    collateral_delta=AssetBalance(
                        asset=A_ETH,
                        balance=Balance(
                            amount=collateral_change,
                            usd_value=eth_price * collateral_change,
                        ),
                    ),
                    trove_operation=operation,
                    sequence_number=str(change['sequenceNumber']),
                )
                result[owner].append(event)
            except (DeserializationError, KeyError) as e:
                log.debug(f'Failed to deserialize Liquity trove event: {change}')
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.msg_aggregator.add_warning(
                    f'Ignoring Liquity Trove event in Liquity. '
                    f'Failed to decode remote information. {msg}.',
                )
                continue

    return result
def get_staking_history(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
    """Query the Liquity subgraph for LQTY staking events per address.

    Events outside the requested timestamp window are skipped. Remote query
    failures are logged and yield an empty mapping; individual undecodable
    changes are reported to the user and skipped.
    """
    try:
        staked = self._get_raw_history(addresses, 'stake')
    except RemoteError as e:
        log.error(f'Failed to query stake graph events for liquity. {str(e)}')
        staked = {}

    result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
    for stake in staked.get('lqtyStakes', []):
        owner = to_checksum_address(stake['id'])
        for change in stake['changes']:
            try:
                timestamp = change['transaction']['timestamp']
                if timestamp < from_timestamp:
                    continue
                if timestamp > to_timestamp:
                    # assumes changes are time-ordered, so stop at the window end
                    break
                operation_stake = LiquityStakeEventType.deserialize(change['stakeOperation'])
                # Historical prices used to value stake amounts and gains
                lqty_price = PriceHistorian().query_historical_price(
                    from_asset=A_LQTY,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
                lusd_price = PriceHistorian().query_historical_price(
                    from_asset=A_LUSD,
                    to_asset=A_USD,
                    timestamp=timestamp,
                )
                stake_after = deserialize_optional_to_fval(
                    value=change['stakedAmountAfter'],
                    name='stakedAmountAfter',
                    location='liquity',
                )
                stake_change = deserialize_optional_to_fval(
                    value=change['stakedAmountChange'],
                    name='stakedAmountChange',
                    location='liquity',
                )
                issuance_gain = deserialize_optional_to_fval(
                    value=change['issuanceGain'],
                    name='issuanceGain',
                    location='liquity',
                )
                redemption_gain = deserialize_optional_to_fval(
                    value=change['redemptionGain'],
                    name='redemptionGain',
                    location='liquity',
                )
                stake_event = LiquityStakeEvent(
                    kind='stake',
                    tx=change['transaction']['id'],
                    address=owner,
                    timestamp=timestamp,
                    stake_after=AssetBalance(
                        asset=A_LQTY,
                        balance=Balance(
                            amount=stake_after,
                            usd_value=lqty_price * stake_after,
                        ),
                    ),
                    stake_change=AssetBalance(
                        asset=A_LQTY,
                        balance=Balance(
                            amount=stake_change,
                            usd_value=lqty_price * stake_change,
                        ),
                    ),
                    issuance_gain=AssetBalance(
                        asset=A_LUSD,
                        balance=Balance(
                            amount=issuance_gain,
                            usd_value=lusd_price * issuance_gain,
                        ),
                    ),
                    redemption_gain=AssetBalance(
                        asset=A_LUSD,
                        balance=Balance(
                            amount=redemption_gain,
                            usd_value=lusd_price * redemption_gain,
                        ),
                    ),
                    stake_operation=operation_stake,
                    sequence_number=str(change['transaction']['sequenceNumber']),
                )
                result[owner].append(stake_event)
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                log.debug(f'Failed to deserialize Liquity entry: {change}')
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.msg_aggregator.add_warning(
                    f'Ignoring Liquity Stake event in Liquity. '
                    f'Failed to decode remote information. {msg}.',
                )
                continue
    return result
def get_positions(
        self,
        addresses_list: List[ChecksumEthAddress],
) -> Dict[ChecksumEthAddress, Trove]:
    """Query current Liquity trove positions for the given addresses.

    Queries the TroveManager contract via multicall (including DSProxy
    addresses, whose troves are attributed back to the owning user address)
    and returns a mapping of address to active <Trove>. Inactive troves and
    undecodable results are skipped.
    """
    contract = EthereumContract(
        address=LIQUITY_TROVE_MANAGER.address,
        abi=LIQUITY_TROVE_MANAGER.abi,
        deployed_block=LIQUITY_TROVE_MANAGER.deployed_block,
    )
    # make a copy of the list to avoid modifications in the list that is passed as argument
    addresses = list(addresses_list)
    proxied_addresses = self._get_accounts_having_proxy()
    proxies_to_address = {v: k for k, v in proxied_addresses.items()}
    addresses += proxied_addresses.values()

    calls = [
        (LIQUITY_TROVE_MANAGER.address, contract.encode(method_name='Troves', arguments=[x]))
        for x in addresses
    ]
    outputs = multicall_2(
        ethereum=self.ethereum,
        require_success=False,  # a failed call for one address must not abort the rest
        calls=calls,
    )

    data: Dict[ChecksumEthAddress, Trove] = {}
    eth_price = Inquirer().find_usd_price(A_ETH)
    lusd_price = Inquirer().find_usd_price(A_LUSD)
    for idx, output in enumerate(outputs):
        status, result = output
        if status is True:
            try:
                trove_info = contract.decode(result, 'Troves', arguments=[addresses[idx]])
                trove_is_active = bool(trove_info[3])  # pylint: disable=unsubscriptable-object
                if not trove_is_active:
                    continue
                # Both collateral (ETH) and debt (LUSD) use 18 decimals
                collateral = deserialize_asset_amount(
                    token_normalized_value_decimals(trove_info[1], 18),  # noqa: E501 pylint: disable=unsubscriptable-object
                )
                debt = deserialize_asset_amount(
                    token_normalized_value_decimals(trove_info[0], 18),  # noqa: E501 pylint: disable=unsubscriptable-object
                )
                collateral_balance = AssetBalance(
                    asset=A_ETH,
                    balance=Balance(
                        amount=collateral,
                        usd_value=eth_price * collateral,
                    ),
                )
                debt_balance = AssetBalance(
                    asset=A_LUSD,
                    balance=Balance(
                        amount=debt,
                        usd_value=lusd_price * debt,
                    ),
                )
                # Avoid division errors
                collateralization_ratio: Optional[FVal]
                liquidation_price: Optional[FVal]
                if debt > 0:
                    collateralization_ratio = eth_price * collateral / debt * 100
                else:
                    collateralization_ratio = None
                if collateral > 0:
                    liquidation_price = debt * lusd_price * FVal(MIN_COLL_RATE) / collateral
                else:
                    liquidation_price = None

                # Attribute proxy-held troves to the owning user address
                account_address = addresses[idx]
                if account_address in proxies_to_address:
                    account_address = proxies_to_address[account_address]
                data[account_address] = Trove(
                    collateral=collateral_balance,
                    debt=debt_balance,
                    collateralization_ratio=collateralization_ratio,
                    liquidation_price=liquidation_price,
                    active=trove_is_active,
                    trove_id=trove_info[4],  # pylint: disable=unsubscriptable-object
                )
            except DeserializationError as e:
                self.msg_aggregator.add_warning(
                    f'Ignoring Liquity trove information. '
                    f'Failed to decode contract information. {str(e)}.',
                )
    return data
def _process_trove_events(
        self,
        changes: List[Dict[str, Any]],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
) -> List[DefiEvent]:
    """Convert raw Liquity trove changes into <DefiEvent> entries.

    Tracks the running LUSD debt balance across changes so that paybacks in
    excess of generated debt are recorded as realized LUSD loss (pnl).
    Liquidations record the lost collateral as negative ETH pnl. Changes
    outside the requested timestamp window or that fail to decode are skipped.
    """
    events = []
    # Running totals used to derive realized LUSD loss across paybacks
    total_lusd_trove_balance = Balance()
    realized_trove_lusd_loss = Balance()
    for change in changes:
        try:
            operation = TroveOperation.deserialize(change['troveOperation'])
            collateral_change = deserialize_asset_amount(change['collateralChange'])
            debt_change = deserialize_asset_amount(change['debtChange'])
            timestamp = change['transaction']['timestamp']
            if timestamp < from_timestamp:
                continue
            if timestamp > to_timestamp:
                # assumes changes are time-ordered, so stop at the window end
                break

            got_asset: Optional[Asset]
            spent_asset: Optional[Asset]
            pnl = got_asset = got_balance = spent_asset = spent_balance = None
            count_spent_got_cost_basis = False
            # In one transaction it is possible to generate debt and change the collateral
            if debt_change != AssetAmount(ZERO):
                if debt_change > ZERO:
                    # Generate debt
                    count_spent_got_cost_basis = True
                    got_asset = A_LUSD
                    got_balance = Balance(
                        amount=debt_change,
                        usd_value=query_usd_price_or_use_default(
                            asset=A_LUSD,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                    total_lusd_trove_balance += got_balance
                else:  # payback debt
                    count_spent_got_cost_basis = True
                    spent_asset = A_LUSD
                    spent_balance = Balance(
                        amount=abs(debt_change),
                        usd_value=query_usd_price_or_use_default(
                            asset=A_LUSD,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                    total_lusd_trove_balance -= spent_balance
                    # If cumulative paybacks exceed generated debt (plus
                    # already-realized loss), record the excess as realized loss
                    balance = total_lusd_trove_balance.amount + realized_trove_lusd_loss.amount
                    if balance < ZERO:
                        pnl_balance = total_lusd_trove_balance + realized_trove_lusd_loss
                        realized_trove_lusd_loss += -pnl_balance
                        pnl = [AssetBalance(asset=A_LUSD, balance=pnl_balance)]

            if collateral_change != AssetAmount(ZERO):
                if collateral_change < ZERO:
                    # Withdraw collateral
                    got_asset = A_ETH
                    got_balance = Balance(
                        amount=abs(collateral_change),
                        usd_value=query_usd_price_or_use_default(
                            asset=A_ETH,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                else:  # Deposit collateral
                    spent_asset = A_ETH
                    spent_balance = Balance(
                        amount=collateral_change,
                        usd_value=query_usd_price_or_use_default(
                            asset=A_ETH,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )

            if operation in (
                TroveOperation.LIQUIDATEINNORMALMODE,
                TroveOperation.LIQUIDATEINRECOVERYMODE,
            ):
                # Liquidation: the collateral is lost, recorded as negative pnl
                count_spent_got_cost_basis = True
                spent_asset = A_ETH
                spent_balance = Balance(
                    amount=abs(collateral_change),
                    usd_value=query_usd_price_or_use_default(
                        asset=A_ETH,
                        time=timestamp,
                        default_value=ZERO,
                        location='Liquity',
                    ),
                )
                pnl = [AssetBalance(asset=A_ETH, balance=-spent_balance)]
            event = DefiEvent(
                timestamp=Timestamp(change['transaction']['timestamp']),
                wrapped_event=change,
                event_type=DefiEventType.LIQUITY,
                got_asset=got_asset,
                got_balance=got_balance,
                spent_asset=spent_asset,
                spent_balance=spent_balance,
                pnl=pnl,
                count_spent_got_cost_basis=count_spent_got_cost_basis,
                tx_hash=change['transaction']['id'],
            )
            events.append(event)
        except (DeserializationError, KeyError) as e:
            msg = str(e)
            if isinstance(e, KeyError):
                msg = f'Missing key entry for {msg}.'
            log.debug(f'Failed to extract defievent in Liquity from {change}')
            self.msg_aggregator.add_warning(
                f'Ignoring Liquity Trove event in Liquity. '
                f'Failed to decode remote information. {msg}.',
            )
            continue
    return events
def query_balances(self) -> ExchangeQueryBalances:
    """Query the Bitfinex wallets endpoint and aggregate the account balances.

    The wallets endpoint returns a list where each item is a currency wallet
    (type exchange/margin/funding, currency, balance, ...). Tickers come in
    Bitfinex format and are standardized via `asset_from_bitfinex`.

    Endpoint documentation:
    https://docs.bitfinex.com/reference#rest-auth-wallets
    """
    self.first_connection()

    response = self._api_query('wallets')
    if response.status_code != HTTPStatus.OK:
        return self._process_unsuccessful_response(
            response=response,
            case='balances',
        )

    try:
        wallets = jsonloads_list(response.text)
    except JSONDecodeError as e:
        msg = f'{self.name} returned invalid JSON response: {response.text}.'
        log.error(msg)
        raise RemoteError(msg) from e

    # Positions of the fields of interest within a wallet item
    ticker_idx = 1
    amount_idx = 2
    totals: DefaultDict[Asset, Balance] = defaultdict(Balance)
    for wallet in wallets:
        if len(wallet) < API_WALLET_MIN_RESULT_LENGTH:
            log.error(
                f'Error processing a {self.name} balance result. '
                f'Found less items than expected',
                wallet=wallet,
            )
            self.msg_aggregator.add_error(
                f'Failed to deserialize a {self.name} balance result. '
                f'Check logs for details. Ignoring it.',
            )
            continue

        # bitfinex can show small negative balances for some coins. Ignore
        if wallet[amount_idx] <= 0:
            continue

        try:
            asset = asset_from_bitfinex(
                bitfinex_name=wallet[ticker_idx],
                currency_map=self.currency_map,
            )
        except (UnknownAsset, UnsupportedAsset) as e:
            asset_tag = 'unknown' if isinstance(e, UnknownAsset) else 'unsupported'
            self.msg_aggregator.add_warning(
                f'Found {asset_tag} {self.name} asset {e.asset_name} due to: {str(e)}. '
                f'Ignoring its balance query.',
            )
            continue

        try:
            usd_price = Inquirer().find_usd_price(asset=asset)
        except RemoteError as e:
            self.msg_aggregator.add_error(
                f'Error processing {self.name} {asset.name} balance result due to inability '
                f'to query USD price: {str(e)}. Skipping balance result.',
            )
            continue

        try:
            amount = deserialize_asset_amount(wallet[amount_idx])
        except DeserializationError as e:
            self.msg_aggregator.add_error(
                f'Error processing {self.name} {asset.name} balance result due to inability '
                f'to deserialize asset amount due to {str(e)}. Skipping balance result.',
            )
            continue

        totals[asset] += Balance(amount=amount, usd_value=amount * usd_price)

    return dict(totals), ''
def get_events_for_atoken_and_address(
        self,
        user_address: ChecksumEthAddress,
        atoken: EthereumToken,
        deposit_events: List[Dict[str, Any]],
        withdraw_events: List[Dict[str, Any]],
        from_block: int,
        to_block: int,
) -> List[AaveEvent]:
    """Build deposit/interest/withdrawal AaveEvents for one aToken of one address.

    Queries the aToken's mint (Transfer from the zero address) logs, matches them
    against the given lending-pool deposit events by (block, amount, timestamp,
    tx hash), and classifies any unmatched mint as an interest payment.

    This function should be entered while holding the history_lock semaphore
    """
    # aToken mints appear as Transfers from the zero address to the user
    argument_filters = {
        'from': ZERO_ADDRESS,
        'to': user_address,
    }
    mint_events = self.ethereum.get_logs(
        contract_address=atoken.ethereum_address,
        abi=ATOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=to_block,
    )
    # Each mint is keyed by (block_number, amount, timestamp, tx_hash) so it can
    # later be matched against a corresponding deposit event
    mint_data = set()
    mint_data_to_log_index = {}
    for event in mint_events:
        amount = hex_or_bytes_to_int(event['data'])
        if amount == 0:
            continue  # first mint can be for 0. Ignore
        entry = (
            deserialize_blocknumber(event['blockNumber']),
            amount,
            self.ethereum.get_event_timestamp(event),
            event['transactionHash'],
        )
        mint_data.add(entry)
        mint_data_to_log_index[entry] = deserialize_int_from_hex_or_int(
            event['logIndex'],
            'aave log index',
        )

    reserve_asset = _atoken_to_reserve_asset(atoken)
    reserve_address, decimals = _get_reserve_address_decimals(reserve_asset.identifier)

    aave_events = []
    for event in deposit_events:
        # Only process deposit events for this aToken's underlying reserve
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            deposit = hex_or_bytes_to_int(event['data'][:66])
            block_number = deserialize_blocknumber(event['blockNumber'])
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            log_index = deserialize_int_from_hex_or_int(
                event['logIndex'],
                'aave log index',
            )
            # If there is a corresponding deposit event remove the minting event data
            entry = (block_number, deposit, timestamp, tx_hash)
            if entry in mint_data:
                mint_data.remove(entry)
                del mint_data_to_log_index[entry]

            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave deposit',
                msg_aggregator=self.msg_aggregator,
            )
            # Raw on-chain integer scaled down by the reserve's decimals
            deposit_amount = deposit / (FVal(10)**FVal(decimals))
            aave_events.append(AaveEvent(
                event_type='deposit',
                asset=reserve_asset,
                value=Balance(
                    amount=deposit_amount,
                    usd_value=deposit_amount * usd_price,
                ),
                block_number=block_number,
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=log_index,
            ))

    # Any mint left unmatched by a deposit is treated as interest paid out
    for data in mint_data:
        usd_price = query_usd_price_zero_if_error(
            asset=atoken,
            time=data[2],
            location='aave interest profit',
            msg_aggregator=self.msg_aggregator,
        )
        interest_amount = data[1] / (FVal(10)**FVal(decimals))
        aave_events.append(AaveEvent(
            event_type='interest',
            asset=atoken,
            value=Balance(
                amount=interest_amount,
                usd_value=interest_amount * usd_price,
            ),
            block_number=data[0],
            timestamp=data[2],
            tx_hash=data[3],
            log_index=mint_data_to_log_index[data],
        ))

    for event in withdraw_events:
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            withdrawal = hex_or_bytes_to_int(event['data'][:66])
            block_number = deserialize_blocknumber(event['blockNumber'])
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave withdrawal',
                msg_aggregator=self.msg_aggregator,
            )
            withdrawal_amount = withdrawal / (FVal(10)**FVal(decimals))
            aave_events.append(AaveEvent(
                event_type='withdrawal',
                asset=reserve_asset,
                value=Balance(
                    amount=withdrawal_amount,
                    usd_value=withdrawal_amount * usd_price,
                ),
                block_number=block_number,
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=deserialize_int_from_hex_or_int(
                    event['logIndex'],
                    'aave log index',
                ),
            ))

    return aave_events
def get_history_for_address( self, user_address: ChecksumEthAddress, to_block: int, atokens_list: Optional[List[EthereumToken]] = None, given_from_block: Optional[int] = None, ) -> AaveHistory: """ Queries aave history for a single address. This function should be entered while holding the history_lock semaphore """ # Get all deposit events for the address from_block = AAVE_LENDING_POOL.deployed_block if given_from_block is None else given_from_block # noqa: E501 argument_filters = { '_user': user_address, } query_events = True if given_from_block is not None and to_block - given_from_block < MAX_BLOCKTIME_CACHE: # noqa: E501 query_events = False # Save time by not querying events if last query is recent deposit_events = [] withdraw_events = [] if query_events: deposit_events.extend( self.ethereum.get_logs( contract_address=AAVE_LENDING_POOL.address, abi=AAVE_LENDING_POOL.abi, event_name='Deposit', argument_filters=argument_filters, from_block=from_block, to_block=to_block, )) withdraw_events.extend( self.ethereum.get_logs( contract_address=AAVE_LENDING_POOL.address, abi=AAVE_LENDING_POOL.abi, event_name='RedeemUnderlying', argument_filters=argument_filters, from_block=from_block, to_block=to_block, )) # now for each atoken get all mint events and pass then to profit calculation tokens = atokens_list if atokens_list is not None else ATOKENS_LIST total_address_events = [] total_earned_map = {} for token in tokens: log.debug( f'Querying aave events for {user_address} and token ' f'{token.identifier} with query_events={query_events}', ) events = [] if given_from_block: events.extend( self.database.get_aave_events(user_address, token)) new_events = [] if query_events: new_events = self.get_events_for_atoken_and_address( user_address=user_address, atoken=token, deposit_events=deposit_events, withdraw_events=withdraw_events, from_block=from_block, to_block=to_block, ) events.extend(new_events) total_balance = Balance() for x in events: if x.event_type == 'interest': 
total_balance += x.value # If the user still has balance in Aave we also need to see how much # accrued interest has not been yet paid out # TODO: ARCHIVE if to_block is not latest here we should get the balance # from the old block. Means using archive node balance = self.ethereum.call_contract( contract_address=token.ethereum_address, abi=ATOKEN_ABI, method_name='balanceOf', arguments=[user_address], ) principal_balance = self.ethereum.call_contract( contract_address=token.ethereum_address, abi=ATOKEN_ABI, method_name='principalBalanceOf', arguments=[user_address], ) if len(events) == 0 and balance == 0 and principal_balance == 0: # Nothing for this aToken for this address continue unpaid_interest = (balance - principal_balance) / (FVal(10)**FVal( token.decimals)) usd_price = Inquirer().find_usd_price(token) total_balance += Balance( amount=unpaid_interest, usd_value=unpaid_interest * usd_price, ) total_earned_map[token] = total_balance total_address_events.extend(events) # now update the DB with the recently queried events self.database.add_aave_events(user_address, new_events) # After all events have been queried then also update the query range. # Even if no events are found for an address we need to remember the range self.database.update_used_block_query_range( name=f'aave_events_{user_address}', from_block=AAVE_LENDING_POOL.deployed_block, to_block=to_block, ) total_address_events.sort(key=lambda event: event.timestamp) return AaveHistory(events=total_address_events, total_earned=total_earned_map)
def _get_lend_events(
        self,
        event_type: Literal['mint', 'redeem'],
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> List[CompoundEvent]:
    """Query the Compound subgraph for lending events of an address.

    `event_type` selects between mintEvents (supplying the underlying and
    receiving cTokens) and redeemEvents (returning cTokens for underlying).
    Entries whose cToken/underlying symbol is unknown or whose graph id cannot
    be parsed into (tx_hash, log_index) are logged and skipped.

    Returns the resulting list of CompoundEvents; realized_pnl is left None
    here and filled in later by event processing.
    """
    param_types, param_values = get_common_params(from_ts, to_ts, address)
    if event_type == 'mint':
        graph_event_name = 'mintEvents'
        addr_position = 'to'
    elif event_type == 'redeem':
        graph_event_name = 'redeemEvents'
        addr_position = 'from'

    result = self.graph.query(  # type: ignore
        querystr=LEND_EVENTS_QUERY_PREFIX.format(
            graph_event_name=graph_event_name,
            addr_position=addr_position,
        ),
        param_types=param_types,
        param_values=param_values,
    )

    events = []
    for entry in result[graph_event_name]:
        ctoken_symbol = entry['cTokenSymbol']
        try:
            ctoken_asset = Asset(ctoken_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.'
            )
            continue

        # underlying symbol is the cToken symbol without the leading 'c'
        underlying_symbol = ctoken_symbol[1:]
        try:
            underlying_asset = Asset(underlying_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected token symbol {underlying_symbol} during '
                f'graph query. Skipping.',
            )
            continue

        timestamp = entry['blockTime']
        usd_price = query_usd_price_zero_if_error(
            asset=underlying_asset,
            time=timestamp,
            location=f'compound {event_type}',
            msg_aggregator=self.msg_aggregator,
        )
        underlying_amount = FVal(entry['underlyingAmount'])
        usd_value = underlying_amount * usd_price
        parse_result = _get_txhash_and_logidx(entry['id'])
        if parse_result is None:
            # BUGFIX: previously this message always said 'mint' even for
            # redeem events -- report the actual event type being processed
            log.error(
                f'Found unprocessable {event_type} id from the graph {entry["id"]}. Skipping'
            )
            continue

        amount = FVal(entry['amount'])
        if event_type == 'mint':
            from_value = Balance(amount=underlying_amount, usd_value=usd_value)
            to_value = Balance(amount=amount, usd_value=usd_value)
            from_asset = underlying_asset
            to_asset = ctoken_asset
        else:  # redeem
            from_value = Balance(amount=amount, usd_value=usd_value)
            to_value = Balance(amount=underlying_amount, usd_value=usd_value)
            from_asset = ctoken_asset
            to_asset = underlying_asset

        events.append(CompoundEvent(
            event_type=event_type,
            address=address,
            block_number=entry['blockNumber'],
            timestamp=timestamp,
            asset=from_asset,
            value=from_value,
            to_asset=to_asset,
            to_value=to_value,
            realized_pnl=None,
            tx_hash=parse_result[0],
            log_index=parse_result[1],
        ))

    return events
def query_balances(self) -> ExchangeQueryBalances: try: resp = self.api_query('balances') except RemoteError as e: msg = ('Bittrex API request failed. Could not reach bittrex due ' 'to {}'.format(e)) log.error(msg) return None, msg returned_balances: Dict[Asset, Balance] = {} for entry in resp: try: asset = asset_from_bittrex(entry['currencySymbol']) amount = deserialize_asset_amount(entry['total']) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported bittrex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown bittrex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except DeserializationError as e: msg = ( f'Failed to parse bittrex balance entry due to {str(e)}.' f' Ignoring its balance query. Check logs for more details' ) self.msg_aggregator.add_error(msg) log.error( 'Error processing a bittrex balance entry', entry=entry, error=msg, ) continue if entry['currencySymbol'] == 'BTXCRD': # skip BTXCRD balance, since it's bittrex internal and we can't query usd price continue try: usd_price = Inquirer().find_usd_price(asset=asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing bittrex balance entry due to inability to ' f'query USD price: {str(e)}. Skipping balance entry', ) continue usd_value = amount * usd_price returned_balances[asset] = Balance( amount=amount, usd_value=usd_value, ) log.debug( 'bittrex balance query result', currency=asset, amount=amount, usd_value=usd_value, ) return returned_balances, ''
def _process_events(
        self,
        events: List[CompoundEvent],
        given_defi_balances: GIVEN_DEFI_BALANCES,
) -> Tuple[ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS]:
    """Processes all events and returns a dictionary of earned balances totals

    Walks the events in order, keeping per-address/per-asset running tallies:
    - `assets`: net lending flows (mint subtracts, redeem adds back)
    - `loss_assets`: net borrowing flows (borrow subtracts, repay adds back)
    Realized profit/loss is attached to redeem/repay events in place via
    `_replace(realized_pnl=...)`. Afterwards, current balances are folded in so
    totals reflect still-open positions.

    Returns (profit_so_far, loss_so_far, liquidation_profit, rewards_assets).
    """
    assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    loss_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    rewards_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    profit_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    loss_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    liquidation_profit: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))

    balances = self.get_balances(given_defi_balances)

    for idx, event in enumerate(events):
        if event.event_type == 'mint':
            assets[event.address][event.asset] -= event.value
        elif event.event_type == 'redeem':
            assert event.to_asset, 'redeem events should have a to_asset'
            assert event.to_value, 'redeem events should have a to_value'
            # Profit = what comes back now + what already came back, minus
            # profit that has already been credited in earlier redeems
            profit_amount = (
                assets[event.address][event.to_asset].amount +
                event.to_value.amount -
                profit_so_far[event.address][event.to_asset].amount
            )
            profit: Optional[Balance]
            if profit_amount >= 0:
                usd_price = query_usd_price_zero_if_error(
                    asset=event.to_asset,
                    time=event.timestamp,
                    location='comp redeem event processing',
                    msg_aggregator=self.msg_aggregator,
                )
                profit = Balance(profit_amount, profit_amount * usd_price)
                profit_so_far[event.address][event.to_asset] += profit
            else:
                profit = None

            assets[event.address][event.to_asset] += event.to_value
            events[idx] = event._replace(realized_pnl=profit)  # TODO: maybe not named tuple?
        elif event.event_type == 'borrow':
            loss_assets[event.address][event.asset] -= event.value
        elif event.event_type == 'repay':
            # Mirror of the redeem profit computation, for borrow losses
            loss_amount = (
                loss_assets[event.address][event.asset].amount +
                event.value.amount -
                loss_so_far[event.address][event.asset].amount
            )
            loss: Optional[Balance]
            if loss_amount >= 0:
                usd_price = query_usd_price_zero_if_error(
                    asset=event.asset,
                    time=event.timestamp,
                    location='comp repay event processing',
                    msg_aggregator=self.msg_aggregator,
                )
                loss = Balance(loss_amount, loss_amount * usd_price)
                loss_so_far[event.address][event.asset] += loss
            else:
                loss = None

            loss_assets[event.address][event.asset] += event.value
            events[idx] = event._replace(realized_pnl=loss)  # TODO: maybe not named tuple?
        elif event.event_type == 'liquidation':
            assert event.to_asset, 'liquidation events should have a to_asset'
            # Liquidator covers part of the borrowed amount
            loss_assets[event.address][event.asset] += event.value
            liquidation_profit[event.address][event.asset] += event.value
            # Liquidator receives discounted to_asset
            loss_assets[event.address][event.to_asset] += event.to_value
            loss_so_far[event.address][event.to_asset] += event.to_value
        elif event.event_type == 'comp':
            rewards_assets[event.address][A_COMP] += event.value

    # Fold in the currently open positions so the totals are complete
    for address, bentry in balances.items():
        for asset, entry in bentry['lending'].items():
            profit_amount = (
                profit_so_far[address][asset].amount +
                entry.balance.amount +
                assets[address][asset].amount
            )
            if profit_amount < 0:
                log.error(
                    f'In compound we calculated negative profit. Should not happen. '
                    f'address: {address} asset: {asset} ',
                )
            else:
                usd_price = Inquirer().find_usd_price(Asset(asset))
                profit_so_far[address][asset] = Balance(
                    amount=profit_amount,
                    usd_value=profit_amount * usd_price,
                )

        for asset, entry in bentry['borrowing'].items():
            remaining = entry.balance + loss_assets[address][asset]
            if remaining.amount < ZERO:
                continue
            loss_so_far[address][asset] += remaining
            # Recompute usd_value at the current price if it went negative
            if loss_so_far[address][asset].usd_value < ZERO:
                amount = loss_so_far[address][asset].amount
                loss_so_far[address][asset] = Balance(
                    amount=amount,
                    usd_value=amount * Inquirer().find_usd_price(Asset(asset)),
                )

        for asset, entry in bentry['rewards'].items():
            rewards_assets[address][asset] += entry.balance

    return profit_so_far, loss_so_far, liquidation_profit, rewards_assets
'0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397') ADDR2 = deserialize_ethereum_address( '0x00F8a0D8EE1c21151BCcB416bCa1C152f9952D19') ADDR3 = deserialize_ethereum_address( '0x3266F3546a1e5Dc6A15588f3324741A0E20a3B6c') # List of ADDR1, ADDR2 and ADDR3 deposit events from 1604506685 to 1605044577 # sorted by (timestamp, log_index). EXPECTED_DEPOSITS = [ Eth2Deposit( from_address=ADDR1, pubkey= '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b', # noqa: E501 withdrawal_credentials= '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499', # noqa: E501 value=Balance(FVal(32), FVal(64)), deposit_index=9, tx_hash= '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1', log_index=22, timestamp=Timestamp(int(1604506685)), ), Eth2Deposit( from_address=ADDR3, pubkey= '0x90b2f65cb43d9cdb2279af9f76010d667b9d8d72e908f2515497a7102820ce6bb15302fe2b8dc082fce9718569344ad8', # noqa: E501 withdrawal_credentials= '0x00a257d19e1650dec1ab59fc9e1cb9a9fc2fe7265b0f27e7d79ff61aeff0a1f0', # noqa: E501 value=Balance(FVal(32), FVal(64)), deposit_index=993, tx_hash=
def query_balances(self) -> ExchangeQueryBalances: try: resp = self.api_query_dict('returnCompleteBalances', {"account": "all"}) except RemoteError as e: msg = ('Poloniex API request failed. Could not reach poloniex due ' 'to {}'.format(e)) log.error(msg) return None, msg assets_balance: Dict[Asset, Balance] = {} for poloniex_asset, v in resp.items(): try: available = deserialize_asset_amount(v['available']) on_orders = deserialize_asset_amount(v['onOrders']) except DeserializationError as e: self.msg_aggregator.add_error( f'Could not deserialize amount from poloniex due to ' f'{str(e)}. Ignoring its balance query.', ) continue if available != ZERO or on_orders != ZERO: try: asset = asset_from_poloniex(poloniex_asset) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported poloniex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown poloniex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except DeserializationError: log.error( f'Unexpected poloniex asset type. Expected string ' f' but got {type(poloniex_asset)}', ) self.msg_aggregator.add_error( 'Found poloniex asset entry with non-string type. ' ' Ignoring its balance query.', ) continue if asset == A_LEND: # poloniex mistakenly returns LEND balances continue # https://github.com/rotki/rotki/issues/2530 try: usd_price = Inquirer().find_usd_price(asset=asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing poloniex balance entry due to inability to ' f'query USD price: {str(e)}. Skipping balance entry', ) continue amount = available + on_orders usd_value = amount * usd_price assets_balance[asset] = Balance( amount=amount, usd_value=usd_value, ) log.debug( 'Poloniex balance query', currency=asset, amount=amount, usd_value=usd_value, ) return assets_balance, ''
def test_eth2_deposits_serialization(): addr1 = make_ethereum_address() addr2 = make_ethereum_address() deposits = [ Eth2Deposit( from_address=addr1, pubkey= '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b', # noqa: E501 withdrawal_credentials= '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499', # noqa: E501 value=Balance(FVal(32), FVal(64)), deposit_index=9, tx_hash= '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1', log_index=22, timestamp=Timestamp(int(1604506685)), ), Eth2Deposit( from_address=addr2, pubkey= '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3', # noqa: E501 withdrawal_credentials= '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817', # noqa: E501 value=Balance(FVal(32), FVal(64)), deposit_index=1650, tx_hash= '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7', log_index=221, timestamp=Timestamp(int(1605043544)), ), ] serialized = process_result_list(deposits) assert serialized == [ { 'from_address': addr1, 'pubkey': '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b', # noqa: E501 'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499', # noqa: E501 'value': { 'amount': '32', 'usd_value': '64' }, 'deposit_index': 9, 'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1', 'log_index': 22, 'timestamp': 1604506685, }, { 'from_address': addr2, 'pubkey': '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3', # noqa: E501 'withdrawal_credentials': '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817', # noqa: E501 'value': { 'amount': '32', 'usd_value': '64' }, 'deposit_index': 1650, 'tx_hash': '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7', 'log_index': 221, 'timestamp': 1605043544, }, ]
1588463542: FVal('1.009'), 1588430911: FVal('1.009'), 1592175763: FVal('1.013'), 1594502373: FVal('1.019'), }, }, } aave_mocked_current_prices = {A_ADAI_V1: FVal('1.017')} expected_aave_deposit_test_events = [ AaveDepositWithdrawalEvent( event_type='deposit', asset=A_DAI, atoken=A_ADAI_V1, value=Balance( amount=FVal('102.926986169186236436'), usd_value=FVal('104.367963975554843746104'), ), block_number=9963767, timestamp=Timestamp(1588114293), tx_hash='0x8b72307967c4f7a486c1cb1b6ebca5e549de06e02930ece0399e2096f1a132c5', log_index=72, ), AaveDepositWithdrawalEvent( event_type='deposit', asset=A_DAI, atoken=A_ADAI_V1, value=Balance( amount=FVal('160'), usd_value=FVal('161.440'), ), block_number=9987395, timestamp=Timestamp(1588430911),
def _query_spot_balances( self, balances: DefaultDict[Asset, Balance], ) -> DefaultDict[Asset, Balance]: account_data = self.api_query_dict('api', 'account') binance_balances = account_data.get('balances', None) if not binance_balances: raise RemoteError( 'Binance spot balances response did not contain the balances key' ) for entry in binance_balances: try: # force string https://github.com/rotki/rotki/issues/2342 asset_symbol = str(entry['asset']) free = deserialize_asset_amount(entry['free']) locked = deserialize_asset_amount(entry['locked']) except KeyError as e: raise RemoteError( f'Binance spot balance asset entry did not contain key {str(e)}' ) from e # noqa: E501 except DeserializationError as e: raise RemoteError( 'Failed to deserialize an amount from binance spot balance asset entry' ) from e # noqa: E501 if len(asset_symbol) >= 5 and asset_symbol.startswith('LD'): # Some lending coins also appear to start with the LD prefix. Ignore them continue amount = free + locked if amount == ZERO: continue try: asset = asset_from_binance(asset_symbol) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported {self.name} asset {e.asset_name}. ' f'Ignoring its balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown {self.name} asset {e.asset_name}. ' f'Ignoring its balance query.', ) continue except DeserializationError: self.msg_aggregator.add_error( f'Found {self.name} asset with non-string type {type(entry["asset"])}. ' f'Ignoring its balance query.', ) continue try: usd_price = Inquirer().find_usd_price(asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing {self.name} balance entry due to inability to ' f'query USD price: {str(e)}. Skipping balance entry', ) continue balances[asset] += Balance( amount=amount, usd_value=amount * usd_price, ) return balances
def test_query_vaults_usdc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with USDC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Map the test account to the DSProxy that owns vault 7588
    proxies_mapping = {
        ethereum_accounts[0]: '0xBE79958661741079679aFf75DbEd713cE71a979d',  # 7588
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    vaults = assert_proper_response_with_result(response)
    vault_7588 = MakerdaoVault(
        identifier=7588,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x56D88244073B2fC17af5B1E6088936D5bAaDc37B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal('1.03'),
        liquidation_price=None,
        stability_fee=FVal('0.04'),
    )
    expected_vaults = [vault_7588.serialize()]
    # stability_fee/liquidation_ratio are ignored -- presumably because these
    # are live on-chain parameters that can change; confirm if they drift
    assert_serialized_lists_equal(
        expected_vaults,
        vaults,
        ignore_keys=['stability_fee', 'liquidation_ratio'],
    )

    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Expected historical event sequence of vault 7588
    vault_7588_details = {
        'identifier': 7588,
        'collateral_asset': 'USDC',
        'creation_ts': 1585286480,
        'total_interest_owed': '0.00050636718',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '20',
                'usd_value': '20.46',
            },
            'timestamp': 1585286480,
            'tx_hash': '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '15.99',
                'usd_value': '16.35777',
            },
            'timestamp': 1585286769,
            'tx_hash': '0xdb861c893a51e4649ff3740cd3658cd4c9b1d048d3b8b4d117f4319bd60aee01',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '35.990506367',
                'usd_value': '36.818288',
            },
            'timestamp': 1585290263,
            'tx_hash': '0xdd7825fe4a93c6f1ffa25a91b6da2396c229fe16b17242ad5c0bf7962928b2ec',
        }, {
            'event_type': 'withdraw',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585290300,
            'tx_hash': '0x97462ebba7ce2467787bf6de25a25c24e538cf8a647919112c5f048b6a293408',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7588_details]
    assert_serialized_lists_equal(expected_details, details, ignore_keys=['liquidation_ratio'])
def _query_margined_futures_balances( self, api_type: Literal['fapi', 'dapi'], balances: DefaultDict[Asset, Balance], ) -> DefaultDict[Asset, Balance]: """Queries binance margined future balances and if any found adds them to `balances` May raise: - RemoteError """ try: response = self.api_query_list(api_type, 'balance') except BinancePermissionError as e: log.warning( f'Insufficient permission to query {self.name} {api_type} balances.' f'Skipping query. Response details: {str(e)}', ) return balances try: for entry in response: amount = deserialize_asset_amount(entry['balance']) if amount == ZERO: continue try: asset = asset_from_binance(entry['asset']) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported {self.name} asset {e.asset_name}. ' f'Ignoring its margined futures balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown {self.name} asset {e.asset_name}. ' f'Ignoring its margined futures balance query.', ) continue except DeserializationError: self.msg_aggregator.add_error( f'Found {self.name} asset with non-string type ' f'{type(entry["asset"])}. Ignoring its margined futures balance query.', ) continue try: usd_price = Inquirer().find_usd_price(asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing {self.name} balance entry due to inability to ' f'query USD price: {str(e)}. Skipping margined futures balance entry', ) continue balances[asset] += Balance( amount=amount, usd_value=amount * usd_price, ) except KeyError as e: self.msg_aggregator.add_error( f'At {self.name} margined futures balance query did not find ' f'expected key {str(e)}. Skipping margined futures query...', ) return balances
def _calculate_interest_and_profit(
        self,
        user_address: ChecksumEthAddress,
        user_result: Dict[str, Any],
        actions: List[AaveDepositWithdrawalEvent],
        balances: AaveBalances,
        db_interest_events: Set[AaveInterestEvent],
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> Tuple[List[AaveInterestEvent], Dict[Asset, Balance]]:
    """Derive aave interest events and total earned per asset from graph data.

    Replays deposit/withdrawal actions against each reserve's aToken balance
    history: whenever the on-history balance at an action's tx differs from the
    locally tracked balance, the difference is emitted as a new interest event.
    Finally, accrued-but-unpaid interest of still-open lending balances is
    added to `total_earned`.

    Returns (new interest events not yet in the DB, total earned per asset).
    """
    # Map reserve address -> parsed aToken balance history within [from_ts, to_ts]
    reserve_history = {}
    for reserve in user_result['reserves']:
        pairs = reserve['id'].split('0x')
        if len(pairs) != 4:
            log.error(
                f'Expected to find 3 addresses in graph\'s reserve history id '
                f'but the encountered id does not match: {reserve["id"]}. Skipping entry...',
            )
            continue
        try:
            address_s = '0x' + pairs[2]
            reserve_address = deserialize_ethereum_address(address_s)
        except DeserializationError:
            log.error(
                f'Failed to deserialize reserve address {address_s} '
                f'Skipping reserve address {address_s} for user address {user_address}',
            )
            continue
        atoken_history = _parse_atoken_balance_history(
            history=reserve['aTokenBalanceHistory'],
            from_ts=from_ts,
            to_ts=to_ts,
        )
        reserve_history[reserve_address] = atoken_history

    interest_events: List[AaveInterestEvent] = []
    # Locally tracked aToken balance per asset as actions are replayed
    atoken_balances: Dict[Asset, FVal] = defaultdict(FVal)
    # History entries already consumed, so they aren't matched twice
    used_history_indices = set()
    total_earned: Dict[Asset, Balance] = defaultdict(Balance)

    # Go through the existing db interest events and add total earned
    for interest_event in db_interest_events:
        total_earned[interest_event.asset] += interest_event.value

    # Create all new interest events in the query
    actions.sort(key=lambda event: event.timestamp)
    for action in actions:
        if action.event_type == 'deposit':
            atoken_balances[action.asset] += action.value.amount
        else:  # withdrawal
            atoken_balances[action.asset] -= action.value.amount

        action_reserve_address = asset_to_aave_reserve(action.asset)
        if action_reserve_address is None:
            log.error(
                f'Could not find aave reserve address for asset'
                f'{action.asset} in an aave graph response.'
                f' Skipping entry...',
            )
            continue
        history = reserve_history.get(action_reserve_address, None)
        if history is None:
            log.error(
                f'Could not find aTokenBalanceHistory for reserve '
                f'{action_reserve_address} in an aave graph response.'
                f' Skipping entry...',
            )
            continue
        history.sort(key=lambda event: event.timestamp)

        for idx, entry in enumerate(history):
            if idx in used_history_indices:
                continue
            used_history_indices.add(idx)

            if entry.tx_hash == action.tx_hash:
                # Any divergence between the history balance and our local
                # tally at this tx is interest that was accrued meanwhile
                diff = entry.balance - atoken_balances[action.asset]
                if diff != ZERO:
                    atoken_balances[action.asset] = entry.balance
                    asset = ASSET_TO_ATOKENV1.get(action.asset, None)
                    if asset is None:
                        log.error(
                            f'Could not find corresponding aToken to '
                            f'{action.asset.identifier} during an aave graph query'
                            f' Skipping entry...',
                        )
                        continue
                    timestamp = entry.timestamp
                    usd_price = query_usd_price_zero_if_error(
                        asset=asset,
                        time=timestamp,
                        location='aave interest event from graph query',
                        msg_aggregator=self.msg_aggregator,
                    )
                    earned_balance = Balance(amount=diff, usd_value=diff * usd_price)
                    interest_event = AaveInterestEvent(
                        event_type='interest',
                        asset=asset,
                        value=earned_balance,
                        block_number=0,  # can't get from graph query
                        timestamp=timestamp,
                        tx_hash=entry.tx_hash,
                        # not really the log index, but should also be unique
                        log_index=action.log_index + 1,
                    )
                    if interest_event in db_interest_events:
                        # This should not really happen since we already query
                        # historical atoken balance history in the new range
                        log.warning(
                            f'During aave subgraph query interest and profit calculation '
                            f'tried to generate interest event {interest_event} that '
                            f'already existed in the DB ',
                        )
                        continue
                    interest_events.append(interest_event)
                    total_earned[asset] += earned_balance

                # and once done break off the loop
                break

            # else this atoken history is not due to an action, so skip it.
            # It's probably due to a simple transfer
            atoken_balances[action.asset] = entry.balance
            if action.event_type == 'deposit':
                atoken_balances[action.asset] += action.value.amount
            else:  # withdrawal
                atoken_balances[action.asset] -= action.value.amount

    # Take aave unpaid interest into account
    for balance_asset, lending_balance in balances.lending.items():
        atoken = ASSET_TO_ATOKENV1.get(balance_asset, None)
        if atoken is None:
            log.error(
                f'Could not find corresponding aToken to '
                f'{balance_asset.identifier} during an aave graph unpair interest '
                f'query. Skipping entry...',
            )
            continue
        principal_balance = self.ethereum.call_contract(
            contract_address=atoken.ethereum_address,
            abi=ATOKEN_ABI,
            method_name='principalBalanceOf',
            arguments=[user_address],
        )
        # current balance minus principal = interest accrued but not yet paid
        unpaid_interest = lending_balance.balance.amount - (principal_balance / (FVal(10)**FVal(atoken.decimals)))  # noqa: E501
        usd_price = Inquirer().find_usd_price(atoken)
        total_earned[atoken] += Balance(
            amount=unpaid_interest,
            usd_value=unpaid_interest * usd_price,
        )

    return interest_events, total_earned
def _get_borrow_events( self, event_type: Literal['borrow', 'repay'], address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: param_types, param_values = get_common_params(from_ts, to_ts, address) if event_type == 'borrow': graph_event_name = 'borrowEvents' payer_or_empty = '' elif event_type == 'repay': graph_event_name = 'repayEvents' payer_or_empty = 'payer' result = self.graph.query( # type: ignore querystr=BORROW_EVENTS_QUERY_PREFIX.format( graph_event_name=graph_event_name, payer_or_empty=payer_or_empty, ), param_types=param_types, param_values=param_values, ) events = [] for entry in result[graph_event_name]: underlying_symbol = entry['underlyingSymbol'] try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Found unexpected token symbol {underlying_symbol} during ' f'graph query. Skipping.', ) continue timestamp = entry['blockTime'] usd_price = query_usd_price_zero_if_error( asset=underlying_asset, time=timestamp, location=f'compound {event_type}', msg_aggregator=self.msg_aggregator, ) amount = FVal(entry['amount']) parse_result = _get_txhash_and_logidx(entry['id']) if parse_result is None: log.error( f'Found unprocessable borrow/repay id from the graph {entry["id"]}. Skipping', ) continue events.append( CompoundEvent( event_type=event_type, address=address, block_number=entry['blockNumber'], timestamp=timestamp, asset=underlying_asset, value=Balance(amount=amount, usd_value=amount * usd_price), to_asset=None, to_value=None, realized_pnl=None, tx_hash=parse_result[0], log_index=parse_result[1], )) return events
def test_add_and_get_aave_events(data_dir, username):
    """Test that get aave events works fine and returns only events for what we need"""
    msg_aggregator = MessagesAggregator()
    data = DataHandler(data_dir, msg_aggregator)
    data.unlock(username, '123', create_new=True)

    def dai_deposit_withdrawal(deposit_hash, withdrawal_hash):
        """Build the DAI deposit/withdrawal event pair each test address shares"""
        return [
            AaveDepositWithdrawalEvent(
                event_type='deposit',
                asset=A_DAI,
                atoken=A_ADAI_V1,
                value=Balance(amount=ONE, usd_value=ONE),
                block_number=1,
                timestamp=Timestamp(1),
                tx_hash=deposit_hash,
                log_index=1,
            ),
            AaveDepositWithdrawalEvent(
                event_type='withdrawal',
                asset=A_DAI,
                atoken=A_ADAI_V1,
                value=Balance(amount=ONE, usd_value=ONE),
                block_number=2,
                timestamp=Timestamp(2),
                tx_hash=withdrawal_hash,
                log_index=2,
            ),
        ]

    addr1 = make_ethereum_address()
    addr1_events = dai_deposit_withdrawal(
        '0x01653e88600a6492ad6e9ae2af415c990e623479057e4e93b163e65cfb2d4436',
        '0x4147da3e5d3c0565a99192ce0b32182ab30b8e1067921d9b2a8ef3bd60b7e2ce',
    )
    data.db.add_aave_events(address=addr1, events=addr1_events)

    addr2 = make_ethereum_address()
    addr2_events = dai_deposit_withdrawal(
        '0x8c094d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        '0x58c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
    )
    data.db.add_aave_events(address=addr2, events=addr2_events)

    # addr3 has all types of aave events so we test serialization/deserialization
    addr3 = make_ethereum_address()
    addr3_events = dai_deposit_withdrawal(
        '0x9e394d58f33e8dedcd348cb33b58f3bd447602f1fecb99e51b1c2868029eab55',
        '0x4c167445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
    ) + [
        AaveInterestEvent(
            event_type='interest',
            asset=A_WBTC,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=4,
            timestamp=Timestamp(4),
            tx_hash='0x49c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=4,
        ),
        AaveBorrowEvent(
            event_type='borrow',
            asset=A_ETH,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=5,
            timestamp=Timestamp(5),
            tx_hash='0x19c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=5,
            borrow_rate_mode='stable',
            borrow_rate=FVal('0.05233232323423432'),
            accrued_borrow_interest=FVal('5.112234'),
        ),
        AaveRepayEvent(
            event_type='repay',
            asset=A_MANA,
            value=Balance(amount=ONE, usd_value=ONE),
            block_number=6,
            timestamp=Timestamp(6),
            tx_hash='0x29c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
            log_index=6,
            fee=Balance(amount=FVal('0.1'), usd_value=FVal('0.1')),
        ),
        AaveLiquidationEvent(
            event_type='liquidation',
            collateral_asset=A_ETH,
            collateral_balance=Balance(amount=ONE, usd_value=ONE),
            principal_asset=A_ETH,
            principal_balance=Balance(amount=ONE, usd_value=ONE),
            block_number=7,
            log_index=7,
            timestamp=Timestamp(7),
            tx_hash='0x39c67445d26679623f9b7d56a8be260a275cb6744a1c1ae5a8d6883a5a5c03de',
        ),
    ]
    data.db.add_aave_events(address=addr3, events=addr3_events)

    # Only the events of the queried address (and atoken, if given) come back
    assert data.db.get_aave_events(address=addr1, atoken=A_ADAI_V1) == addr1_events
    assert data.db.get_aave_events(address=addr2, atoken=A_ADAI_V1) == addr2_events
    assert data.db.get_aave_events(address=addr3) == addr3_events

    # check that all aave events are properly hashable (aka can go in a set)
    assert len(set(addr3_events)) == len(addr3_events)
def _get_liquidation_events( self, address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: """https://compound.finance/docs/ctokens#liquidate-borrow""" param_types, param_values = get_common_params(from_ts, to_ts, address) result = self.graph.query( # type: ignore querystr= """liquidationEvents (where: {blockTime_lte: $end_ts, blockTime_gte: $start_ts, from: $address}) { id amount from blockNumber blockTime cTokenSymbol underlyingSymbol underlyingRepayAmount }}""", param_types=param_types, param_values=param_values, ) events = [] for entry in result['liquidationEvents']: ctoken_symbol = entry['cTokenSymbol'] try: ctoken_asset = Asset(ctoken_symbol) except UnknownAsset: log.error( f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.' ) continue underlying_symbol = entry['underlyingSymbol'] try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Found unexpected token symbol {underlying_symbol} during ' f'graph query. 
Skipping.', ) continue timestamp = entry['blockTime'] # Amount/value of underlying asset paid by liquidator # Essentially liquidator covers part of the debt of the user debt_amount = FVal(entry['underlyingRepayAmount']) underlying_usd_price = query_usd_price_zero_if_error( asset=underlying_asset, time=timestamp, location='compound liquidation underlying asset', msg_aggregator=self.msg_aggregator, ) debt_usd_value = debt_amount * underlying_usd_price # Amount/value of ctoken_asset lost to the liquidator # This is what the liquidator gains at a discount liquidated_amount = FVal(entry['amount']) liquidated_usd_price = query_usd_price_zero_if_error( asset=ctoken_asset, time=timestamp, location='compound liquidation ctoken asset', msg_aggregator=self.msg_aggregator, ) liquidated_usd_value = liquidated_amount * liquidated_usd_price parse_result = _get_txhash_and_logidx(entry['id']) if parse_result is None: log.error( f'Found unprocessable liquidation id from the graph {entry["id"]}. Skipping', ) continue gained_value = Balance(amount=debt_amount, usd_value=debt_usd_value) lost_value = Balance(amount=liquidated_amount, usd_value=liquidated_usd_value) events.append( CompoundEvent( event_type='liquidation', address=address, block_number=entry['blockNumber'], timestamp=timestamp, asset=underlying_asset, value=gained_value, to_asset=ctoken_asset, to_value=lost_value, realized_pnl=None, tx_hash=parse_result[0], log_index=parse_result[1], )) return events
    def get_history_events(
            self,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> List[DefiEvent]:
        """Gets the history events from maker vaults for accounting

        This is a premium only call. Check happens only in the API level.

        Each vault event inside [from_timestamp, to_timestamp] is wrapped into
        a DefiEvent, with got/spent assets and pnl filled depending on the
        vault event type.
        """
        vault_details = self.get_vault_details()
        events = []
        for detail in vault_details:
            # Running DAI debt drawn from this vault, and the DAI loss already
            # booked as realized so each loss is only counted once
            total_vault_dai_balance = Balance()
            realized_vault_dai_loss = Balance()
            for event in detail.events:
                timestamp = event.timestamp
                if timestamp < from_timestamp:
                    continue
                if timestamp > to_timestamp:
                    # NOTE(review): the break assumes detail.events is sorted
                    # by ascending timestamp -- confirm in get_vault_details()
                    break

                got_asset: Optional[Asset]
                spent_asset: Optional[Asset]
                pnl = got_asset = got_balance = spent_asset = spent_balance = None  # noqa: E501
                count_spent_got_cost_basis = False
                if event.event_type == VaultEventType.GENERATE_DEBT:
                    # Drawing debt: user receives DAI
                    count_spent_got_cost_basis = True
                    got_asset = A_DAI
                    got_balance = event.value
                    total_vault_dai_balance += event.value
                elif event.event_type == VaultEventType.PAYBACK_DEBT:
                    # Paying back debt: user spends DAI
                    count_spent_got_cost_basis = True
                    spent_asset = A_DAI
                    spent_balance = event.value
                    total_vault_dai_balance -= event.value
                    # If more DAI was paid back than was ever drawn (minus the
                    # loss already realized), the difference is a new DAI loss
                    if total_vault_dai_balance.amount + realized_vault_dai_loss.amount < ZERO:
                        pnl_balance = total_vault_dai_balance + realized_vault_dai_loss
                        realized_vault_dai_loss += -pnl_balance
                        pnl = [AssetBalance(asset=A_DAI, balance=pnl_balance)]

                elif event.event_type == VaultEventType.DEPOSIT_COLLATERAL:
                    spent_asset = detail.collateral_asset
                    spent_balance = event.value
                elif event.event_type == VaultEventType.WITHDRAW_COLLATERAL:
                    got_asset = detail.collateral_asset
                    got_balance = event.value
                elif event.event_type == VaultEventType.LIQUIDATION:
                    count_spent_got_cost_basis = True
                    # TODO: Don't you also get the dai here -- but how to calculate it?
                    # Liquidation: the seized collateral is both spent and a loss
                    spent_asset = detail.collateral_asset
                    spent_balance = event.value
                    pnl = [AssetBalance(asset=detail.collateral_asset, balance=-spent_balance)]
                else:
                    raise AssertionError(f'Invalid Makerdao vault event type {event.event_type}')

                events.append(DefiEvent(
                    timestamp=timestamp,
                    wrapped_event=event,
                    event_type=DefiEventType.MAKERDAO_VAULT_EVENT,
                    got_asset=got_asset,
                    got_balance=got_balance,
                    spent_asset=spent_asset,
                    spent_balance=spent_balance,
                    pnl=pnl,
                    # Depositing and withdrawing from a vault is not counted in
                    # cost basis. Assets were always yours, you did not rebuy them.
                    # Other actions are counted though to track debt and liquidations
                    count_spent_got_cost_basis=count_spent_got_cost_basis,
                    tx_hash=event.tx_hash,
                ))

        return events
def _get_eth2_staking_deposits_onchain( self, addresses: List[ChecksumEthAddress], msg_aggregator: MessagesAggregator, from_ts: Timestamp, to_ts: Timestamp, ) -> List[Eth2Deposit]: from_block = max( ETH2_DEPOSIT.deployed_block, self.ethereum.get_blocknumber_by_time(from_ts), ) to_block = self.ethereum.get_blocknumber_by_time(to_ts) events = ETH2_DEPOSIT.get_logs( ethereum=self.ethereum, event_name='DepositEvent', argument_filters={}, from_block=from_block, to_block=to_block, ) transactions = self.ethereum.transactions.query( addresses=addresses, from_ts=from_ts, to_ts=to_ts, with_limit=False, recent_first=False, ) deposits: List[Eth2Deposit] = [] for transaction in transactions: if transaction.to_address != ETH2_DEPOSIT.address: continue tx_hash = '0x' + transaction.tx_hash.hex() for event in events: # Now find the corresponding event. If no event is found the transaction # probably failed or was something other than a deposit if event['transactionHash'] == tx_hash: decoded_data = decode_event_data(event['data'], EVENT_ABI) # all pylint ignores below due to https://github.com/PyCQA/pylint/issues/4114 amount = int.from_bytes(decoded_data[2], byteorder='little') # pylint: disable=unsubscriptable-object # noqa: E501 usd_price = query_usd_price_zero_if_error( asset=A_ETH, time=transaction.timestamp, location='Eth2 staking query', msg_aggregator=msg_aggregator, ) normalized_amount = from_gwei(FVal(amount)) deposits.append( Eth2Deposit( from_address=transaction.from_address, pubkey='0x' + decoded_data[0].hex(), # pylint: disable=unsubscriptable-object # noqa: E501 withdrawal_credentials='0x' + decoded_data[1].hex(), # pylint: disable=unsubscriptable-object # noqa: E501 value=Balance(normalized_amount, usd_price * normalized_amount), deposit_index=int.from_bytes(decoded_data[4], byteorder='little'), # pylint: disable=unsubscriptable-object # noqa: E501 tx_hash=tx_hash, log_index=event['logIndex'], timestamp=Timestamp(transaction.timestamp), )) break return deposits