def _query_margined_futures_balances(
        self,
        api_type: Literal['fapi', 'dapi'],
        balances: DefaultDict[Asset, Balance],
) -> DefaultDict[Asset, Balance]:
    """Queries binance margined future balances and if any found adds them to `balances`

    If binance denies the query due to missing permissions the problem is
    only logged and `balances` is returned unchanged.

    May raise:
    - RemoteError
    """
    try:
        response = self.api_query_list(api_type, 'balance')
    except BinancePermissionError as e:
        log.warning(
            # note: trailing space added so the two sentences don't run together
            f'Insufficient permission to query {self.name} {api_type} balances. '
            f'Skipping query. Response details: {str(e)}',
        )
        return balances

    try:
        for entry in response:
            amount = deserialize_asset_amount(entry['balance'])
            if amount == ZERO:
                continue

            try:
                asset = asset_from_binance(entry['asset'])
            except UnsupportedAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unsupported {self.name} asset {e.asset_name}. '
                    f'Ignoring its margined futures balance query.',
                )
                continue
            except UnknownAsset as e:
                self.msg_aggregator.add_warning(
                    f'Found unknown {self.name} asset {e.asset_name}. '
                    f'Ignoring its margined futures balance query.',
                )
                continue
            except DeserializationError:
                self.msg_aggregator.add_error(
                    f'Found {self.name} asset with non-string type '
                    f'{type(entry["asset"])}. Ignoring its margined futures balance query.',
                )
                continue

            try:
                usd_price = Inquirer().find_usd_price(asset)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Error processing {self.name} balance entry due to inability to '
                    f'query USD price: {str(e)}. Skipping margined futures balance entry',
                )
                continue

            balances[asset] += Balance(
                amount=amount,
                usd_value=amount * usd_price,
            )
    except KeyError as e:
        self.msg_aggregator.add_error(
            f'At {self.name} margined futures balance query did not find '
            f'expected key {str(e)}. Skipping margined futures query...',
        )

    return balances
def test_eth2_result_serialization():
    """Check that process_result() turns an Eth2DepositResult into plain
    JSON-friendly values: Balance objects become {'amount', 'usd_value'}
    string dicts, Timestamp becomes a plain int, and the hex string fields
    (pubkey, withdrawal_credentials, tx_hash) pass through unchanged."""
    addr1 = make_ethereum_address()
    addr2 = make_ethereum_address()
    result = Eth2DepositResult(
        deposits=[
            Eth2Deposit(
                from_address=addr1,
                pubkey='0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
                withdrawal_credentials='0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
                value=Balance(FVal(32), FVal(64)),
                validator_index=9,
                tx_hash='0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
                log_index=22,
                timestamp=Timestamp(int(1604506685)),
            ),
            Eth2Deposit(
                from_address=addr2,
                pubkey='0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
                withdrawal_credentials='0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
                value=Balance(FVal(32), FVal(64)),
                validator_index=1650,
                tx_hash='0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
                log_index=221,
                timestamp=Timestamp(int(1605043544)),
            ),
        ],
        totals={
            addr1: Balance(FVal(1), FVal(1)),
            addr2: Balance(FVal(2), FVal(2)),
        },
    )
    serialized = process_result(result)
    # deposits keep their input order; totals are keyed by the address objects
    assert serialized == {
        'deposits': [
            {
                'from_address': addr1,
                'pubkey': '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b',  # noqa: E501
                'withdrawal_credentials': '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499',  # noqa: E501
                'value': {
                    'amount': '32',
                    'usd_value': '64'
                },
                'validator_index': 9,
                'tx_hash': '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1',
                'log_index': 22,
                'timestamp': 1604506685,
            },
            {
                'from_address': addr2,
                'pubkey': '0xa8ff5fc88412d080a297683c25a791ef77eb52d75b265fabab1f2c2591bb927c35818ac6289bc6680ab252787d0ebab3',  # noqa: E501
                'withdrawal_credentials': '0x00cfe1c10347d642a8b8daf86d23bcb368076972691445de2cf517ff43765817',  # noqa: E501
                'value': {
                    'amount': '32',
                    'usd_value': '64'
                },
                'validator_index': 1650,
                'tx_hash': '0x6905f4d1843fb8c003c1fbbc2c8e6c5f9792f4f44ddb1122553412ee0b128da7',
                'log_index': 221,
                'timestamp': 1605043544,
            },
        ],
        'totals': {
            addr1: {
                'amount': '1',
                'usd_value': '1'
            },
            addr2: {
                'amount': '2',
                'usd_value': '2'
            },
        },
    }
def test_query_vaults_wbtc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with WBTC as collateral

    Queries the makerdao vaults resource, pins the expected state of the
    closed/repaid vault 8913, then queries the vault details resource and
    checks the first 4 events of that vault.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    proxies_mapping = {
        ethereum_accounts[0]: '0x9684e6C1c7B79868839b27F88bA6d5A176367075',  # 8913
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    # That proxy has 3 vaults. We only want to test 8913, which is closed/repaid so just keep that
    vaults = [
        x for x in assert_proper_response_with_result(response)
        if x['identifier'] == 8913
    ]
    vault_8913 = MakerdaoVault(
        identifier=8913,
        owner=ethereum_accounts[0],
        collateral_type='WBTC-A',
        urn='0x37f7B3C82A9Edc13FdCcE66E7d500b3698A13294',
        collateral_asset=A_WBTC,
        collateral=Balance(ZERO, ZERO),  # closed vault: no collateral left
        debt=Balance(ZERO, ZERO),  # closed vault: no debt left
        collateralization_ratio=None,
        liquidation_ratio=FVal(1.45),
        liquidation_price=None,
        stability_fee=FVal(0.02),
    )
    expected_vaults = [vault_8913.serialize()]
    # stability_fee is excluded from the comparison (see ignore_keys)
    assert_serialized_lists_equal(expected_vaults, vaults, ignore_keys=['stability_fee'])
    # And also make sure that the internal mapping will only query details of 8913
    makerdao_vaults = rotki.chain_manager.get_module('makerdao_vaults')
    makerdao_vaults.vault_mappings = {ethereum_accounts[0]: [vault_8913]}

    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    vault_8913_details = {
        'identifier': 8913,
        'collateral_asset': A_WBTC.identifier,
        'creation_ts': 1588664698,
        'total_interest_owed': '0.1903819198',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '0.011',
                'usd_value': '87.06599',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '25',
                'usd_value': '25.15',
            },
            'timestamp': 1588664698,
            'tx_hash': '0x9ba4a6187fa2c49ba327e7c923846a08a1e972017ec41d3f9f66ef524f7dde59',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '25.000248996',
                'usd_value': '25.15025',
            },
            'timestamp': 1588696496,
            'tx_hash': '0x8bd960e7eb8b9e2b81d2446d1844dd63f94636c7800ea5e3b4d926ea0244c66c',
        }, {
            'event_type': 'deposit',
            'value': {
                'amount': '0.0113',
                'usd_value': '89.440517',
            },
            'timestamp': 1588720248,
            'tx_hash': '0x678c4da562173c102473f1904ff293a767ebac9ec6c7d728ef2fd41acf00a13a',
        }],  # way too many events in the vault, so no need to check them all
    }
    details = assert_proper_response_with_result(response)
    assert len(details) == 1
    assert_serialized_dicts_equal(
        details[0],
        vault_8913_details,
        # Checking only the first 4 events
        length_list_keymap={'events': 4},
    )
def get_eth2_staked_amount(
        ethereum: 'EthereumManager',
        addresses: List[ChecksumEthAddress],
        has_premium: bool,
        msg_aggregator: MessagesAggregator,
        database: 'DBHandler',
) -> Eth2DepositResult:
    """Get the addresses' ETH2 staked amount

    For any given new address query on-chain from the ETH2 deposit contract
    deployment timestamp until now.

    For any existing address query on-chain from the minimum last used query
    range "end_ts" (among all the existing addresses) until now, as long as
    the difference between both is gte than REQUEST_DELTA_TS.

    Then write in DB all the new deposits and finally return them all,
    together with the per-address totals priced at the current ETH USD price.
    """
    new_deposits: List[Eth2Deposit] = []
    totals: Dict[ChecksumEthAddress, Balance] = {}
    new_addresses: List[ChecksumEthAddress] = []
    existing_addresses: List[ChecksumEthAddress] = []
    to_ts = ts_now()
    min_from_ts = to_ts

    # Get addresses' last used query range for ETH2 deposits
    for address in addresses:
        entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
        deposits_range = database.get_used_query_range(name=entry_name)
        if not deposits_range:
            new_addresses.append(address)
        else:
            existing_addresses.append(address)
            # track the earliest end_ts among already-queried addresses so a
            # single on-chain query can cover all of them
            min_from_ts = min(min_from_ts, deposits_range[1])

    # Get deposits for new addresses
    if new_addresses:
        deposits_ = _get_eth2_staked_amount_onchain(
            ethereum=ethereum,
            addresses=new_addresses,
            has_premium=has_premium,
            msg_aggregator=msg_aggregator,
            from_ts=ETH2_DEPLOYED_TS,
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)
        # record the covered query range per address so the next call only
        # queries from end_ts onwards
        for address in new_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=ETH2_DEPLOYED_TS,
                end_ts=to_ts,
            )

    # Get new deposits for existing addresses
    if existing_addresses and min_from_ts + REQUEST_DELTA_TS <= to_ts:
        deposits_ = _get_eth2_staked_amount_onchain(
            ethereum=ethereum,
            addresses=existing_addresses,
            has_premium=has_premium,
            msg_aggregator=msg_aggregator,
            from_ts=Timestamp(min_from_ts),
            to_ts=to_ts,
        )
        new_deposits.extend(deposits_)
        for address in existing_addresses:
            entry_name = f'{ETH2_DEPOSITS_PREFIX}_{address}'
            database.update_used_query_range(
                name=entry_name,
                start_ts=Timestamp(min_from_ts),
                end_ts=to_ts,
            )

    # Insert new deposits in DB
    if new_deposits:
        database.add_eth2_deposits(new_deposits)

    current_usd_price = Inquirer().find_usd_price(Asset('ETH'))

    # Fetch all DB deposits for the given addresses
    deposits: List[Eth2Deposit] = []
    for address in addresses:
        db_deposits = database.get_eth2_deposits(address=address)
        if db_deposits:
            # Calculate total ETH2 balance per address
            total_amount = FVal(
                sum(db_deposit.value.amount for db_deposit in db_deposits))
            totals[address] = Balance(
                amount=total_amount,
                usd_value=total_amount * current_usd_price,
            )
            deposits.extend(db_deposits)

    # stable overall ordering: by time, then by log index within a block
    deposits.sort(key=lambda deposit: (deposit.timestamp, deposit.log_index))
    return Eth2DepositResult(
        deposits=deposits,
        totals=totals,
    )
'0xfeF0E7635281eF8E3B705e9C5B86e1d3B0eAb397') ADDR2 = deserialize_ethereum_address( '0x00F8a0D8EE1c21151BCcB416bCa1C152f9952D19') ADDR3 = deserialize_ethereum_address( '0x3266F3546a1e5Dc6A15588f3324741A0E20a3B6c') # List of ADDR1, ADDR2 and ADDR3 deposit events from 1604506685 to 1605044577 # sorted by (timestamp, log_index). EXPECTED_DEPOSITS = [ Eth2Deposit( from_address=ADDR1, pubkey= '0xb016e31f633a21fbe42a015152399361184f1e2c0803d89823c224994af74a561c4ad8cfc94b18781d589d03e952cd5b', # noqa: E501 withdrawal_credentials= '0x004c7691c2085648f394ffaef851f3b1d51b95f7263114bc923fc5338f5fc499', # noqa: E501 value=Balance(FVal(32), FVal(64)), validator_index=9, tx_hash= '0xd9eca1c2a0c5ff2f25071713432b21cc4d0ff2e8963edc63a48478e395e08db1', log_index=22, timestamp=Timestamp(int(1604506685)), ), Eth2Deposit( from_address=ADDR3, pubkey= '0x90b2f65cb43d9cdb2279af9f76010d667b9d8d72e908f2515497a7102820ce6bb15302fe2b8dc082fce9718569344ad8', # noqa: E501 withdrawal_credentials= '0x00a257d19e1650dec1ab59fc9e1cb9a9fc2fe7265b0f27e7d79ff61aeff0a1f0', # noqa: E501 value=Balance(FVal(32), FVal(64)), validator_index=993, tx_hash=
def _get_borrow_events(
        self,
        event_type: Literal['borrow', 'repay'],
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> List[CompoundEvent]:
    """Query the compound subgraph for an address' borrow or repay events
    in [from_ts, to_ts] and convert them into CompoundEvent entries.

    Entries whose token symbol is unknown or whose graph id can't be parsed
    are logged and skipped.
    """
    param_types, param_values = get_common_params(from_ts, to_ts, address)
    # repay events carry an extra 'payer' field in the graph schema
    graph_event_name, payer_or_empty = {
        'borrow': ('borrowEvents', ''),
        'repay': ('repayEvents', 'payer'),
    }[event_type]

    result = self.graph.query(  # type: ignore
        querystr=BORROW_EVENTS_QUERY_PREFIX.format(
            graph_event_name=graph_event_name,
            payer_or_empty=payer_or_empty,
        ),
        param_types=param_types,
        param_values=param_values,
    )

    events = []
    for raw_event in result[graph_event_name]:
        symbol = raw_event['underlyingSymbol']
        try:
            token = Asset(symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected token symbol {symbol} during '
                f'graph query. Skipping.',
            )
            continue

        event_time = raw_event['blockTime']
        price = query_usd_price_zero_if_error(
            asset=token,
            time=event_time,
            location=f'compound {event_type}',
            msg_aggregator=self.msg_aggregator,
        )
        quantity = FVal(raw_event['amount'])
        parsed_id = _get_txhash_and_logidx(raw_event['id'])
        if parsed_id is None:
            log.error(
                f'Found unprocessable borrow/repay id from the graph {raw_event["id"]}. Skipping',
            )
            continue

        events.append(CompoundEvent(
            event_type=event_type,
            address=address,
            block_number=raw_event['blockNumber'],
            timestamp=event_time,
            asset=token,
            value=Balance(amount=quantity, usd_value=quantity * price),
            to_asset=None,
            to_value=None,
            realized_pnl=None,
            tx_hash=parsed_id[0],
            log_index=parsed_id[1],
        ))

    return events
def _get_lend_events(
        self,
        event_type: Literal['mint', 'redeem'],
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> List[CompoundEvent]:
    """Query the compound subgraph for an address' mint (lend) or redeem
    events in [from_ts, to_ts] and convert them into CompoundEvent entries.

    Entries with unknown cToken/underlying symbols or an unparseable graph
    id are logged and skipped.

    Fix: the "unprocessable id" error message previously always said 'mint',
    mislabeling redeem events; it now reports the actual event type.
    """
    param_types, param_values = get_common_params(from_ts, to_ts, address)
    if event_type == 'mint':
        graph_event_name = 'mintEvents'
        addr_position = 'to'
    elif event_type == 'redeem':
        graph_event_name = 'redeemEvents'
        addr_position = 'from'

    result = self.graph.query(  # type: ignore
        querystr=LEND_EVENTS_QUERY_PREFIX.format(
            graph_event_name=graph_event_name,
            addr_position=addr_position,
        ),
        param_types=param_types,
        param_values=param_values,
    )

    events = []
    for entry in result[graph_event_name]:
        ctoken_symbol = entry['cTokenSymbol']
        try:
            ctoken_asset = Asset(ctoken_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.',
            )
            continue

        # cTokens are named cXXX; strip the leading 'c' for the underlying symbol
        underlying_symbol = ctoken_symbol[1:]
        try:
            underlying_asset = Asset(underlying_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected token symbol {underlying_symbol} during '
                f'graph query. Skipping.',
            )
            continue

        timestamp = entry['blockTime']
        usd_price = query_usd_price_zero_if_error(
            asset=underlying_asset,
            time=timestamp,
            location=f'compound {event_type}',
            msg_aggregator=self.msg_aggregator,
        )
        underlying_amount = FVal(entry['underlyingAmount'])
        usd_value = underlying_amount * usd_price
        parse_result = _get_txhash_and_logidx(entry['id'])
        if parse_result is None:
            log.error(
                # report the actual event type — was hardcoded 'mint'
                f'Found unprocessable {event_type} id from the graph {entry["id"]}. Skipping',
            )
            continue

        amount = FVal(entry['amount'])
        # for mint: underlying goes in, cTokens come out; redeem is the reverse
        if event_type == 'mint':
            from_value = Balance(amount=underlying_amount, usd_value=usd_value)
            to_value = Balance(amount=amount, usd_value=usd_value)
            from_asset = underlying_asset
            to_asset = ctoken_asset
        else:  # redeem
            from_value = Balance(amount=amount, usd_value=usd_value)
            to_value = Balance(amount=underlying_amount, usd_value=usd_value)
            from_asset = ctoken_asset
            to_asset = underlying_asset

        events.append(CompoundEvent(
            event_type=event_type,
            address=address,
            block_number=entry['blockNumber'],
            timestamp=timestamp,
            asset=from_asset,
            value=from_value,
            to_asset=to_asset,
            to_value=to_value,
            realized_pnl=None,
            tx_hash=parse_result[0],
            log_index=parse_result[1],
        ))

    return events
def _get_balances_graph(
        addresses: List[ChecksumEthAddress],
        graph_query: Callable,
) -> ProtocolBalance:
    """Get the addresses' pools data querying the Uniswap subgraph

    Each liquidity position is converted into a <LiquidityPool>. Tokens are
    classified into known (<EthereumToken>) and unknown
    (<UnknownEthereumToken>) sets as they are encountered. Results are
    paginated with limit/offset until a page comes back short.
    """
    address_balances: DDAddressBalances = defaultdict(list)
    known_assets: Set[EthereumToken] = set()
    unknown_assets: Set[UnknownEthereumToken] = set()

    # query with lowercased addresses (checksummed back on the way out)
    addresses_lower = [address.lower() for address in addresses]
    querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$addresses': '[String!]',
        '$balance': 'BigDecimal!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'addresses': addresses_lower,
        'balance': '0',
    }
    while True:
        result = graph_query(
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result['liquidityPositions']

        for lp in result_data:
            user_address = to_checksum_address(lp['user']['id'])
            user_lp_balance = FVal(lp['liquidityTokenBalance'])
            lp_pair = lp['pair']
            lp_address = to_checksum_address(lp_pair['id'])
            lp_total_supply = FVal(lp_pair['totalSupply'])

            # Insert LP tokens reserves within tokens dicts
            token0 = lp_pair['token0']
            token0['total_amount'] = lp_pair['reserve0']
            token1 = lp_pair['token1']
            token1['total_amount'] = lp_pair['reserve1']

            liquidity_pool_assets = []
            for token in token0, token1:
                # Get the token <EthereumToken> or <UnknownEthereumToken>
                asset = get_ethereum_token(
                    symbol=token['symbol'],
                    ethereum_address=to_checksum_address(token['id']),
                    name=token['name'],
                    decimals=int(token['decimals']),
                )
                # Classify the asset either as known or unknown
                if isinstance(asset, EthereumToken):
                    known_assets.add(asset)
                elif isinstance(asset, UnknownEthereumToken):
                    unknown_assets.add(asset)

                # Estimate the underlying asset total_amount:
                # user's share of the pool reserve, proportional to LP tokens held
                asset_total_amount = FVal(token['total_amount'])
                user_asset_balance = (user_lp_balance / lp_total_supply * asset_total_amount)
                liquidity_pool_asset = LiquidityPoolAsset(
                    asset=asset,
                    total_amount=asset_total_amount,
                    user_balance=Balance(amount=user_asset_balance),
                )
                liquidity_pool_assets.append(liquidity_pool_asset)

            liquidity_pool = LiquidityPool(
                address=lp_address,
                assets=liquidity_pool_assets,
                total_supply=lp_total_supply,
                user_balance=Balance(amount=user_lp_balance),
            )
            address_balances[user_address].append(liquidity_pool)

        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break

        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }

    protocol_balance = ProtocolBalance(
        address_balances=dict(address_balances),
        known_assets=known_assets,
        unknown_assets=unknown_assets,
    )
    return protocol_balance
def _get_balances_graph(
        self,
        addresses: List[ChecksumEthAddress],
) -> ProtocolBalance:
    """Get the addresses' pools data querying this AMM's subgraph

    Each liquidity position is converted into a <LiquidityPool>. Tokens with
    a price oracle go into known_assets, the rest into unknown_assets.

    May raise RemoteError on subgraph failure or on address/token
    deserialization problems (after reporting via the message aggregator
    or the log respectively).
    """
    address_balances: DDAddressToLPBalances = defaultdict(list)
    known_assets: Set[EthereumToken] = set()
    unknown_assets: Set[EthereumToken] = set()

    # query with lowercased addresses (deserialized back on the way out)
    addresses_lower = [address.lower() for address in addresses]
    querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
    query_id = '0'
    query_offset = 0
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$addresses': '[String!]',
        '$balance': 'BigDecimal!',
        '$id': 'ID!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'addresses': addresses_lower,
        'balance': '0',
        'id': query_id,
    }
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            # surface the subgraph problem to the user, then propagate
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(
                    error_msg=str(e),
                    location=self.location,
                ),
            )
            raise

        result_data = result['liquidityPositions']
        for lp in result_data:
            lp_pair = lp['pair']
            lp_total_supply = FVal(lp_pair['totalSupply'])
            user_lp_balance = FVal(lp['liquidityTokenBalance'])
            try:
                user_address = deserialize_ethereum_address(lp['user']['id'])
                lp_address = deserialize_ethereum_address(lp_pair['id'])
            except DeserializationError as e:
                msg = (
                    f'Failed to Deserialize address. Skipping {self.location} '
                    f'pool {lp_pair} with user address {lp["user"]["id"]}')
                log.error(msg)
                raise RemoteError(msg) from e

            # Insert LP tokens reserves within tokens dicts
            token0 = lp_pair['token0']
            token0['total_amount'] = lp_pair['reserve0']
            token1 = lp_pair['token1']
            token1['total_amount'] = lp_pair['reserve1']

            liquidity_pool_assets = []
            for token in token0, token1:
                try:
                    deserialized_eth_address = deserialize_ethereum_address(token['id'])
                except DeserializationError as e:
                    msg = (
                        f'Failed to deserialize token address {token["id"]} '
                        f'Bad token address in {self.location} lp pair came from the graph.'
                    )
                    log.error(msg)
                    raise RemoteError(msg) from e

                asset = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=token['symbol'],
                    ethereum_address=deserialized_eth_address,
                    name=token['name'],
                    decimals=int(token['decimals']),
                )
                # tokens with a price oracle are "known"; the rest need special handling
                if asset.has_oracle():
                    known_assets.add(asset)
                else:
                    unknown_assets.add(asset)

                # Estimate the underlying asset total_amount:
                # user's share of the pool reserve, proportional to LP tokens held
                asset_total_amount = FVal(token['total_amount'])
                user_asset_balance = (user_lp_balance / lp_total_supply * asset_total_amount)
                liquidity_pool_asset = LiquidityPoolAsset(
                    asset=asset,
                    total_amount=asset_total_amount,
                    user_balance=Balance(amount=user_asset_balance),
                )
                liquidity_pool_assets.append(liquidity_pool_asset)

            liquidity_pool = LiquidityPool(
                address=lp_address,
                assets=liquidity_pool_assets,
                total_supply=lp_total_supply,
                user_balance=Balance(amount=user_lp_balance),
            )
            address_balances[user_address].append(liquidity_pool)
            # remember the last seen entry id for id-based pagination below
            query_id = lp['id']

        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break

        # Update pagination step
        # NOTE(review): once the offset hits GRAPH_QUERY_SKIP_LIMIT, pagination
        # appears to switch from offset-based to last-seen-id-based (offset is
        # reset and the query filters by id) — confirm against the query string
        if query_offset == GRAPH_QUERY_SKIP_LIMIT:
            query_offset = 0
            new_query_id = query_id
        else:
            query_offset += GRAPH_QUERY_LIMIT
            new_query_id = '0'
        param_values = {
            **param_values,
            'id': new_query_id,
            'offset': query_offset,
        }

    protocol_balance = ProtocolBalance(
        address_balances=dict(address_balances),
        known_assets=known_assets,
        unknown_assets=unknown_assets,
    )
    return protocol_balance
def _query_vault_data(
        self,
        identifier: int,
        owner: ChecksumEthAddress,
        urn: ChecksumEthAddress,
        ilk: bytes,
) -> Optional[MakerDAOVault]:
    """Query a vault's current state from the MakerDAO VAT/SPOT contracts

    Returns None (after a user warning) for collateral assets rotki does
    not support yet.
    """
    # ilk is null-padded bytes (e.g. b'ETH-A\x00...'); keep the readable prefix
    collateral_type = ilk.split(b'\0', 1)[0].decode()
    asset_symbol = collateral_type.split('-')[0]
    if asset_symbol not in ('ETH', 'BAT', 'USDC', 'WBTC', 'KNC', 'ZRX'):
        self.msg_aggregator.add_warning(
            f'Detected vault with {asset_symbol} as collateral. That is not yet '
            f'supported by rotki',
        )
        return None

    result = self.ethereum.call_contract(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        method_name='urns',
        arguments=[ilk, urn],
    )
    # also known as ink in their contract
    collateral_amount = FVal(result[0] / WAD)
    normalized_debt = result[1]  # known as art in their contract

    result = self.ethereum.call_contract(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        method_name='ilks',
        arguments=[ilk],
    )
    rate = result[1]  # Accumulated Rates
    spot = FVal(result[2])  # Price with Safety Margin
    # How many DAI owner needs to pay back to the vault
    debt_value = FVal(((normalized_debt / WAD) * rate) / RAY)

    result = self.ethereum.call_contract(
        contract_address=MAKERDAO_SPOT.address,
        abi=MAKERDAO_SPOT.abi,
        method_name='ilks',
        arguments=[ilk],
    )
    mat = result[1]
    liquidation_ratio = FVal(mat / RAY)
    asset = Asset(asset_symbol)
    # reconstruct the collateral price from the safety-margin price and the ratio
    price = FVal((spot / RAY) * liquidation_ratio)
    # cache the price for use elsewhere in this object
    self.usd_price[asset_symbol] = price
    collateral_value = FVal(price * collateral_amount)
    if debt_value == 0:
        # no debt -> collateralization ratio is undefined
        collateralization_ratio = None
    else:
        collateralization_ratio = FVal(collateral_value / debt_value).to_percentage(2)

    collateral_usd_value = price * collateral_amount
    if collateral_amount == 0:
        # no collateral -> liquidation price is undefined
        liquidation_price = None
    else:
        liquidation_price = (debt_value * liquidation_ratio) / collateral_amount

    dai_usd_price = Inquirer().find_usd_price(A_DAI)
    return MakerDAOVault(
        identifier=identifier,
        owner=owner,
        collateral_type=collateral_type,
        collateral_asset=asset,
        collateral=Balance(collateral_amount, collateral_usd_value),
        debt=Balance(debt_value, dai_usd_price * debt_value),
        liquidation_ratio=liquidation_ratio,
        collateralization_ratio=collateralization_ratio,
        liquidation_price=liquidation_price,
        urn=urn,
        stability_fee=self.get_stability_fee(ilk),
    )
def _query_vault_details(
        self,
        vault: MakerDAOVault,
        proxy: ChecksumEthAddress,
        urn: ChecksumEthAddress,
) -> Optional[MakerDAOVaultDetails]:
    """Reconstruct a vault's full event history from on-chain logs

    Gathers collateral deposits/withdrawals, DAI generation/payback and
    liquidations for the vault, and computes creation timestamp, total
    interest owed and total liquidated amounts.

    The underlying calls can raise (see comment below): ConversionError and
    RemoteError.
    """
    asset_symbol = vault.collateral_asset.identifier
    # They can raise:
    # ConversionError due to hex_or_bytes_to_address, hex_or_bytes_to_int
    # RemoteError due to external query errors
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CDP_MANAGER.address,
        abi=MAKERDAO_CDP_MANAGER.abi,
        event_name='NewCdp',
        argument_filters={'cdp': vault.identifier},
        from_block=MAKERDAO_CDP_MANAGER.deployed_block,
    )
    if len(events) == 0:
        self.msg_aggregator.add_error(
            'No events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
        return None
    elif len(events) != 1:
        log.error(
            f'Multiple events found for a Vault creation: {events}. Taking '
            f'only the first. This should not happen. Something is wrong',
        )
        self.msg_aggregator.add_error(
            'Multiple events found for a Vault creation. This should never '
            'happen. Please open a bug report: https://github.com/rotki/rotki/issues',
        )
    creation_ts = self.ethereum.get_event_timestamp(events[0])

    # get vat frob events for cross-checking
    argument_filters = {
        'sig': '0x76088703',  # frob
        'arg1': '0x' + vault.ilk.hex(),  # ilk
        'arg2': address_to_bytes32(urn),  # urn
        # arg3 can be urn for the 1st deposit, and proxy/owner for the next ones
        # so don't filter for it
        # 'arg3': address_to_bytes32(proxy),  # proxy - owner
    }
    frob_events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    # only deposits/withdrawals with a matching frob tx are genuine vault ops
    frob_event_tx_hashes = [x['transactionHash'] for x in frob_events]

    gemjoin = GEMJOIN_MAPPING[vault.collateral_type]
    vault_events = []

    # Get the collateral deposit events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        # In cases where a CDP has been migrated from a SAI CDP to a DAI
        # Vault the usr in the first deposit will be the old address. To
        # detect the first deposit in these cases we need to check for
        # arg1 being the urn
        # 'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    # all subsequent deposits should have the proxy as a usr
    # but for non-migrated CDPS the previous query would also work
    # so in those cases we will have the first deposit 2 times
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
    }
    events.extend(
        self.ethereum.get_logs(
            contract_address=gemjoin.address,
            abi=gemjoin.abi,
            event_name='LogNote',
            argument_filters=argument_filters,
            from_block=gemjoin.deployed_block,
        ))
    deposit_tx_hashes = set()
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash in deposit_tx_hashes:
            # Skip duplicate deposit that would be detected in non migrated CDP case
            continue

        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue

        deposit_tx_hashes.add(tx_hash)
        amount = _normalize_amount(
            asset_symbol=asset_symbol,
            amount=hex_or_bytes_to_int(event['topics'][3]),
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral deposit',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.DEPOSIT_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=tx_hash,
            ))

    # Get the collateral withdrawal events
    argument_filters = {
        'sig': '0xef693bed',  # exit
        'usr': proxy,
    }
    events = self.ethereum.get_logs(
        contract_address=gemjoin.address,
        abi=gemjoin.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=gemjoin.deployed_block,
    )
    for event in events:
        tx_hash = event['transactionHash']
        if tx_hash not in frob_event_tx_hashes:
            # If there is no corresponding frob event then skip
            continue

        amount = _normalize_amount(
            asset_symbol=asset_symbol,
            amount=hex_or_bytes_to_int(event['topics'][3]),
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral withdrawal',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.WITHDRAW_COLLATERAL,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # running net DAI debt in wei: generated minus paid back
    total_dai_wei = 0
    # Get the dai generation events
    argument_filters = {
        'sig': '0xbb35783b',  # move
        'arg1': address_to_bytes32(urn),
        # For CDPs that were created by migrating from SAI the first DAI generation
        # during vault creation will have the old owner as arg2. So we can't
        # filter for it here. Still seems like the urn as arg1 is sufficient
        # 'arg2': address_to_bytes32(proxy),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_VAT.address,
        abi=MAKERDAO_VAT.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_VAT.deployed_block,
    )
    for event in events:
        given_amount = _shift_num_right_by(
            hex_or_bytes_to_int(event['topics'][3]), RAY_DIGITS)
        total_dai_wei += given_amount
        amount = _normalize_amount(
            asset_symbol='DAI',
            amount=given_amount,
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),  # fall back to 1 USD for DAI
            location='vault debt generation',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.GENERATE_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the dai payback events
    argument_filters = {
        'sig': '0x3b4da69f',  # join
        'usr': proxy,
        'arg1': address_to_bytes32(urn),
    }
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_DAI_JOIN.address,
        abi=MAKERDAO_DAI_JOIN.abi,
        event_name='LogNote',
        argument_filters=argument_filters,
        from_block=MAKERDAO_DAI_JOIN.deployed_block,
    )
    for event in events:
        given_amount = hex_or_bytes_to_int(event['topics'][3])
        total_dai_wei -= given_amount
        amount = _normalize_amount(
            asset_symbol='DAI',
            amount=given_amount,
        )
        if amount == ZERO:
            # it seems there is a zero DAI value transfer from the urn when
            # withdrawing ETH. So we should ignore these as events
            continue

        timestamp = self.ethereum.get_event_timestamp(event)
        usd_price = query_usd_price_or_use_default(
            asset=A_DAI,
            time=timestamp,
            default_value=FVal(1),
            location='vault debt payback',
        )
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.PAYBACK_DEBT,
                value=Balance(amount, amount * usd_price),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # Get the liquidation events
    argument_filters = {'urn': urn}
    events = self.ethereum.get_logs(
        contract_address=MAKERDAO_CAT.address,
        abi=MAKERDAO_CAT.abi,
        event_name='Bite',
        argument_filters=argument_filters,
        from_block=MAKERDAO_CAT.deployed_block,
    )
    sum_liquidation_amount = ZERO
    sum_liquidation_usd = ZERO
    for event in events:
        # the liquidated lot is the first word of the event data; its length
        # differs depending on whether data arrives hex-encoded or raw
        if isinstance(event['data'], str):
            lot = event['data'][:66]
        else:  # bytes
            lot = event['data'][:32]
        amount = _normalize_amount(
            asset_symbol=asset_symbol,
            amount=hex_or_bytes_to_int(lot),
        )
        timestamp = self.ethereum.get_event_timestamp(event)
        sum_liquidation_amount += amount
        usd_price = query_usd_price_or_use_default(
            asset=vault.collateral_asset,
            time=timestamp,
            default_value=ZERO,
            location='vault collateral liquidation',
        )
        amount_usd_value = amount * usd_price
        sum_liquidation_usd += amount_usd_value
        vault_events.append(
            VaultEvent(
                event_type=VaultEventType.LIQUIDATION,
                value=Balance(amount, amount_usd_value),
                timestamp=timestamp,
                tx_hash=event['transactionHash'],
            ))

    # interest owed = current debt minus net DAI moved in/out of the vault
    total_interest_owed = vault.debt.amount - _normalize_amount(
        asset_symbol='DAI',
        amount=total_dai_wei,
    )
    # sort vault events by timestamp
    vault_events.sort(key=lambda event: event.timestamp)

    return MakerDAOVaultDetails(
        identifier=vault.identifier,
        total_interest_owed=total_interest_owed,
        creation_ts=creation_ts,
        total_liquidated=Balance(sum_liquidation_amount, sum_liquidation_usd),
        events=vault_events,
    )
def query_balances(self) -> ExchangeQueryBalances:
    """Query all coinbase account balances and aggregate them per asset.

    Returns (balances, '') on success and (None, error_message) when the
    coinbase API itself cannot be reached. Per-account problems (unknown or
    unsupported assets, deserialization errors, USD price failures) are
    reported through the message aggregator and the entry is skipped.
    """
    try:
        accounts = self._api_query('accounts')
    except RemoteError as e:
        error_msg = f'Coinbase API request failed. Could not reach coinbase due to {e}'
        log.error(error_msg)
        return None, error_msg

    assets_total: DefaultDict[Asset, Balance] = defaultdict(Balance)
    for account in accounts:
        try:
            balance_data = account['balance']
            if not balance_data:
                continue
            amount = deserialize_asset_amount(balance_data['amount'])
            # ignore empty balances. Coinbase returns zero balances for everything
            # a user does not own
            if amount == ZERO:
                continue

            asset = asset_from_coinbase(balance_data['currency'])
            try:
                usd_price = Inquirer().find_usd_price(asset=asset)
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Error processing coinbase balance entry due to inability to '
                    f'query USD price: {str(e)}. Skipping balance entry',
                )
                continue

            assets_total[asset] += Balance(
                amount=amount,
                usd_value=amount * usd_price,
            )
        except UnknownAsset as e:
            self.msg_aggregator.add_warning(
                f'Found coinbase balance result with unknown asset '
                f'{e.asset_name}. Ignoring it.',
            )
        except UnsupportedAsset as e:
            self.msg_aggregator.add_warning(
                f'Found coinbase balance result with unsupported asset '
                f'{e.asset_name}. Ignoring it.',
            )
        except (DeserializationError, KeyError) as e:
            details = str(e)
            if isinstance(e, KeyError):
                details = f'Missing key entry for {details}.'
            self.msg_aggregator.add_error(
                'Error processing a coinbase account balance. Check logs '
                'for details. Ignoring it.',
            )
            log.error(
                'Error processing a coinbase account balance',
                account_balance=account,
                error=details,
            )

    return dict(assets_total), ''
def query_balances(
        self,
        requested_save_data: bool = False,
        save_despite_errors: bool = False,
        timestamp: Optional[Timestamp] = None,
        ignore_cache: bool = False,
) -> Dict[str, Any]:
    """Query all balances rotkehlchen can see.

    If requested_save_data is True then the data are always saved in the DB,
    if it is False then data are saved if self.data.should_save_balances()
    is True.
    If save_despite_errors is True then even if there is any error the
    snapshot will be saved.
    If timestamp is None then the current timestamp is used.
    If a timestamp is given then that is the time that the balances are going
    to be saved in the DB
    If ignore_cache is True then all underlying calls that have a cache ignore it

    Returns a dictionary with the queried balances.
    """
    log.info(
        'query_balances called',
        requested_save_data=requested_save_data,
        save_despite_errors=save_despite_errors,
    )

    balances: Dict[str, Dict[Asset, Balance]] = {}
    problem_free = True
    # 1. Gather balances from every connected exchange
    for exchange in self.exchange_manager.iterate_exchanges():
        exchange_balances, error_msg = exchange.query_balances(ignore_cache=ignore_cache)
        # If we got an error, disregard that exchange but make sure we don't save data
        if not isinstance(exchange_balances, dict):
            problem_free = False
            self.msg_aggregator.add_message(
                message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                data={'location': exchange.name, 'error': error_msg},
            )
        else:
            location_str = str(exchange.location)
            if location_str not in balances:
                balances[location_str] = exchange_balances
            else:  # multiple exchange of same type. Combine balances
                balances[location_str] = combine_dicts(
                    balances[location_str],
                    exchange_balances,
                )

    # 2. Blockchain balances (assets and liabilities)
    liabilities: Dict[Asset, Balance]
    try:
        blockchain_result = self.chain_manager.query_balances(
            blockchain=None,
            force_token_detection=ignore_cache,
            ignore_cache=ignore_cache,
        )
        if len(blockchain_result.totals.assets) != 0:
            balances[str(Location.BLOCKCHAIN)] = blockchain_result.totals.assets
        liabilities = blockchain_result.totals.liabilities
    except (RemoteError, EthSyncError) as e:
        problem_free = False
        liabilities = {}
        log.error(f'Querying blockchain balances failed due to: {str(e)}')
        self.msg_aggregator.add_message(
            message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
            data={'location': 'blockchain balances query', 'error': str(e)},
        )

    # 3. Add manually tracked liabilities to the blockchain-derived ones
    manually_tracked_liabilities = get_manually_tracked_balances(
        db=self.data.db,
        balance_type=BalanceType.LIABILITY,
    )
    manual_liabilities_as_dict: DefaultDict[Asset, Balance] = defaultdict(Balance)
    for manual_liability in manually_tracked_liabilities:
        manual_liabilities_as_dict[manual_liability.asset] += manual_liability.value

    liabilities = combine_dicts(liabilities, manual_liabilities_as_dict)

    # retrieve loopring balances if module is activated
    if self.chain_manager.get_module('loopring'):
        try:
            loopring_balances = self.chain_manager.get_loopring_balances()
        except RemoteError as e:
            problem_free = False
            self.msg_aggregator.add_message(
                message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                data={'location': 'loopring', 'error': str(e)},
            )
        else:
            if len(loopring_balances) != 0:
                balances[str(Location.LOOPRING)] = loopring_balances

    # retrieve nft balances if module is activated
    nfts = self.chain_manager.get_module('nfts')
    if nfts is not None:
        try:
            # NOTE(review): ignore_cache is not forwarded to the NFT query
            # (hardcoded False) -- confirm whether that is intentional
            nft_mapping = nfts.get_balances(
                addresses=self.chain_manager.queried_addresses_for_module('nfts'),
                return_zero_values=False,
                ignore_cache=False,
            )
        except RemoteError as e:
            problem_free = False
            self.msg_aggregator.add_message(
                message_type=WSMessageType.BALANCE_SNAPSHOT_ERROR,
                data={'location': 'nfts', 'error': str(e)},
            )
        else:
            if len(nft_mapping) != 0:
                if str(Location.BLOCKCHAIN) not in balances:
                    balances[str(Location.BLOCKCHAIN)] = {}

                # Each NFT is modeled as its own Asset with amount 1
                for _, nft_balances in nft_mapping.items():
                    for balance_entry in nft_balances:
                        balances[str(Location.BLOCKCHAIN)][Asset(balance_entry['id'])] = Balance(
                            amount=FVal(1),
                            usd_value=balance_entry['usd_price'],
                        )

    balances = account_for_manually_tracked_asset_balances(db=self.data.db, balances=balances)

    # Calculate usd totals
    assets_total_balance: DefaultDict[Asset, Balance] = defaultdict(Balance)
    total_usd_per_location: Dict[str, FVal] = {}
    for location, asset_balance in balances.items():
        total_usd_per_location[location] = ZERO
        for asset, balance in asset_balance.items():
            assets_total_balance[asset] += balance
            total_usd_per_location[location] += balance.usd_value

    net_usd = sum((balance.usd_value for _, balance in assets_total_balance.items()), ZERO)
    liabilities_total_usd = sum((liability.usd_value for _, liability in liabilities.items()), ZERO)  # noqa: E501
    net_usd -= liabilities_total_usd

    # Calculate location stats
    location_stats: Dict[str, Any] = {}
    for location, total_usd in total_usd_per_location.items():
        if location == str(Location.BLOCKCHAIN):
            # Liabilities only exist on chain, so subtract them there
            total_usd -= liabilities_total_usd

        percentage = (total_usd / net_usd).to_percentage() if net_usd != ZERO else '0%'
        location_stats[location] = {
            'usd_value': total_usd,
            'percentage_of_net_value': percentage,
        }

    # Calculate 'percentage_of_net_value' per asset
    assets_total_balance_as_dict: Dict[Asset, Dict[str, Any]] = {
        asset: balance.to_dict() for asset, balance in assets_total_balance.items()
    }
    liabilities_as_dict: Dict[Asset, Dict[str, Any]] = {
        asset: balance.to_dict() for asset, balance in liabilities.items()
    }
    for asset, balance_dict in assets_total_balance_as_dict.items():
        percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
        assets_total_balance_as_dict[asset]['percentage_of_net_value'] = percentage

    for asset, balance_dict in liabilities_as_dict.items():
        percentage = (balance_dict['usd_value'] / net_usd).to_percentage() if net_usd != ZERO else '0%'  # noqa: E501
        liabilities_as_dict[asset]['percentage_of_net_value'] = percentage

    # Compose balances response
    result_dict = {
        'assets': assets_total_balance_as_dict,
        'liabilities': liabilities_as_dict,
        'location': location_stats,
        'net_usd': net_usd,
    }
    allowed_to_save = requested_save_data or self.data.should_save_balances()

    if (problem_free or save_despite_errors) and allowed_to_save:
        if not timestamp:
            timestamp = Timestamp(int(time.time()))

        self.data.db.save_balances_data(data=result_dict, timestamp=timestamp)
        log.debug('query_balances data saved')
    else:
        log.debug(
            'query_balances data not saved',
            allowed_to_save=allowed_to_save,
            problem_free=problem_free,
            save_despite_errors=save_despite_errors,
        )

    return result_dict
def get_history_events(
        self,
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
) -> List[DefiEvent]:
    """Gets the history events from maker vaults for accounting

    This is a premium only call. Check happens only in the API level.

    Maps each vault event in [from_timestamp, to_timestamp] to a DefiEvent,
    tracking the running vault DAI balance to detect realized DAI losses on
    debt payback.
    """
    vault_details = self.get_vault_details()
    events = []
    for detail in vault_details:
        total_vault_dai_balance = Balance()
        realized_vault_dai_loss = Balance()
        for event in detail.events:
            timestamp = event.timestamp
            if timestamp < from_timestamp:
                continue
            if timestamp > to_timestamp:
                # events are time-sorted per vault, so nothing later matters
                break

            got_asset: Optional[Asset]
            spent_asset: Optional[Asset]
            pnl = got_asset = got_balance = spent_asset = spent_balance = None  # noqa: E501
            count_spent_got_cost_basis = False
            if event.event_type == VaultEventType.GENERATE_DEBT:
                count_spent_got_cost_basis = True
                got_asset = A_DAI
                got_balance = event.value
                total_vault_dai_balance += event.value
            elif event.event_type == VaultEventType.PAYBACK_DEBT:
                count_spent_got_cost_basis = True
                spent_asset = A_DAI
                spent_balance = event.value
                total_vault_dai_balance -= event.value
                # Paying back more DAI than was generated (interest) is a loss
                if total_vault_dai_balance.amount + realized_vault_dai_loss.amount < ZERO:
                    pnl_balance = total_vault_dai_balance + realized_vault_dai_loss
                    realized_vault_dai_loss += -pnl_balance
                    pnl = [AssetBalance(asset=A_DAI, balance=pnl_balance)]
            elif event.event_type == VaultEventType.DEPOSIT_COLLATERAL:
                spent_asset = detail.collateral_asset
                spent_balance = event.value
            elif event.event_type == VaultEventType.WITHDRAW_COLLATERAL:
                got_asset = detail.collateral_asset
                got_balance = event.value
            elif event.event_type == VaultEventType.LIQUIDATION:
                count_spent_got_cost_basis = True
                # TODO: Don't you also get the dai here -- but how to calculate it?
                spent_asset = detail.collateral_asset
                spent_balance = event.value
                pnl = [AssetBalance(asset=detail.collateral_asset, balance=-spent_balance)]
            else:
                raise AssertionError(f'Invalid Makerdao vault event type {event.event_type}')

            events.append(DefiEvent(
                timestamp=timestamp,
                wrapped_event=event,
                event_type=DefiEventType.MAKERDAO_VAULT_EVENT,
                got_asset=got_asset,
                got_balance=got_balance,
                spent_asset=spent_asset,
                spent_balance=spent_balance,
                pnl=pnl,
                # Depositing and withdrawing from a vault is not counted in
                # cost basis. Assets were always yours, you did not rebuy them.
                # Other actions are counted though to track debt and liquidations
                count_spent_got_cost_basis=count_spent_got_cost_basis,
                tx_hash=event.tx_hash,
            ))

    return events
def get_history_for_address(
        self,
        user_address: ChecksumEthAddress,
        to_block: int,
        atokens_list: Optional[List[EthereumToken]] = None,
        given_from_block: Optional[int] = None,
) -> AaveHistory:
    """
    Queries aave history for a single address.

    This function should be entered while holding the history_lock
    semaphore.

    Combines events already stored in the DB with freshly queried on-chain
    events, adds unpaid accrued interest from the current aToken balance,
    persists the new events and updates the queried block range.
    """
    # Get all deposit events for the address
    from_block = AAVE_LENDING_POOL.deployed_block if given_from_block is None else given_from_block  # noqa: E501
    argument_filters = {
        '_user': user_address,
    }
    query_events = True
    if given_from_block is not None and to_block - given_from_block < MAX_BLOCKTIME_CACHE:  # noqa: E501
        query_events = False  # Save time by not querying events if last query is recent

    deposit_events = []
    withdraw_events = []
    if query_events:
        deposit_events.extend(self.ethereum.get_logs(
            contract_address=AAVE_LENDING_POOL.address,
            abi=AAVE_LENDING_POOL.abi,
            event_name='Deposit',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=to_block,
        ))
        withdraw_events.extend(self.ethereum.get_logs(
            contract_address=AAVE_LENDING_POOL.address,
            abi=AAVE_LENDING_POOL.abi,
            event_name='RedeemUnderlying',
            argument_filters=argument_filters,
            from_block=from_block,
            to_block=to_block,
        ))

    # now for each atoken get all mint events and pass then to profit calculation
    tokens = atokens_list if atokens_list is not None else ATOKENS_LIST
    total_address_events = []
    total_earned_map: Dict[Asset, Balance] = {}
    for token in tokens:
        log.debug(
            f'Querying aave events for {user_address} and token '
            f'{token.identifier} with query_events={query_events}',
        )
        events = []
        if given_from_block:
            # Previously queried events are already in the DB
            events.extend(self.database.get_aave_events(user_address, token))
            events = cast(List[AaveSimpleEvent], events)  # type: ignore
        new_events = []
        if query_events:
            new_events = self.get_events_for_atoken_and_address(
                user_address=user_address,
                atoken=token,
                deposit_events=deposit_events,
                withdraw_events=withdraw_events,
                from_block=from_block,
                to_block=to_block,
            )
            events.extend(new_events)

        total_balance = Balance()
        for x in events:
            if x.event_type == 'interest':
                total_balance += x.value  # type: ignore

        # If the user still has balance in Aave we also need to see how much
        # accrued interest has not been yet paid out
        # TODO: ARCHIVE if to_block is not latest here we should get the balance
        # from the old block. Means using archive node
        balance = self.ethereum.call_contract(
            contract_address=token.ethereum_address,
            abi=ATOKEN_ABI,
            method_name='balanceOf',
            arguments=[user_address],
        )
        principal_balance = self.ethereum.call_contract(
            contract_address=token.ethereum_address,
            abi=ATOKEN_ABI,
            method_name='principalBalanceOf',
            arguments=[user_address],
        )

        if len(events) == 0 and balance == 0 and principal_balance == 0:
            # Nothing for this aToken for this address
            continue

        # balance - principal is interest that has accrued but not been realized
        unpaid_interest = (balance - principal_balance) / (FVal(10)**FVal(token.decimals))
        usd_price = Inquirer().find_usd_price(token)
        total_balance += Balance(
            amount=unpaid_interest,
            usd_value=unpaid_interest * usd_price,
        )
        total_earned_map[token] = total_balance
        total_address_events.extend(events)

        # now update the DB with the recently queried events
        self.database.add_aave_events(user_address, new_events)

    # After all events have been queried then also update the query range.
    # Even if no events are found for an address we need to remember the range
    self.database.update_used_block_query_range(
        name=f'aave_events_{user_address}',
        from_block=AAVE_LENDING_POOL.deployed_block,
        to_block=to_block,
    )

    total_address_events.sort(key=lambda event: event.timestamp)
    return AaveHistory(
        events=total_address_events,
        total_earned_interest=total_earned_map,
        total_lost={},
        total_earned_liquidations={},
    )
def _add_protocol_balances(self) -> None:
    """Also count token balances that may come from various protocols

    Adds MakerDAO DSR and vault balances (including DSProxy token balances),
    DeFi liabilities for proxies, AdEx pool balances, staked Eth2 and other
    defi protocol balances on top of the plain on-chain balances.

    May raise EthSyncError if the connected ethereum node is not synced.
    """
    # If we have anything in DSR also count it towards total blockchain balances
    eth_balances = self.balances.eth
    dsr_module = self.makerdao_dsr
    if dsr_module is not None:
        additional_total = Balance()
        current_dsr_report = dsr_module.get_current_dsr()
        for dsr_account, balance_entry in current_dsr_report.balances.items():

            if balance_entry.amount == ZERO:
                continue

            eth_balances[dsr_account].assets[A_DAI] += balance_entry
            additional_total += balance_entry

        if additional_total.amount != ZERO:
            self.totals.assets[A_DAI] += additional_total

    # Also count the vault balance and defi saver wallets and add it to the totals
    vaults_module = self.makerdao_vaults
    if vaults_module is not None:
        balances = vaults_module.get_balances()
        for address, entry in balances.items():
            if address not in eth_balances:
                self.msg_aggregator.add_error(
                    f'The owner of a vault {address} was not in the tracked addresses.'
                    f' This should not happen and is probably a bug. Please report it.',
                )
            else:
                eth_balances[address] += entry
                self.totals += entry

        # Map DSProxy contracts back to their owning user addresses
        proxy_mappings = vaults_module._get_accounts_having_maker_proxy()
        proxy_to_address = {}
        proxy_addresses = []
        for user_address, proxy_address in proxy_mappings.items():
            proxy_to_address[proxy_address] = user_address
            proxy_addresses.append(proxy_address)

        ethtokens = EthTokens(database=self.database, ethereum=self.ethereum)
        try:
            balance_result, token_usd_price = ethtokens.query_tokens_for_addresses(
                addresses=proxy_addresses,
                force_detection=False,
            )
        except BadFunctionCallOutput as e:
            log.error(
                'Assuming unsynced chain. Got web3 BadFunctionCallOutput '
                'exception: {}'.format(str(e)),
            )
            raise EthSyncError(
                'Tried to use the ethereum chain of the provided client to query '
                'token balances but the chain is not synced.',
            ) from e

        # Attribute proxy token balances to the owning user address
        new_result = {proxy_to_address[x]: v for x, v in balance_result.items()}
        self._update_balances_after_token_query(
            action=AccountAction.DSR_PROXY_APPEND,
            balance_result=new_result,
            token_usd_price=token_usd_price,
        )

        # also query defi balances to get liabilities
        defi_balances_map = self.defichad.query_defi_balances(proxy_addresses)
        for proxy_address, defi_balances in defi_balances_map.items():
            self._add_account_defi_balances_to_token_and_totals(
                account=proxy_to_address[proxy_address],
                balances=defi_balances,
            )

    adex_module = self.adex
    if adex_module is not None and self.premium is not None:
        adex_balances = adex_module.get_balances(addresses=self.accounts.eth)
        for address, pool_balances in adex_balances.items():
            for pool_balance in pool_balances:
                eth_balances[address].assets[A_ADX] += pool_balance.adx_balance
                self.totals.assets[A_ADX] += pool_balance.adx_balance
                eth_balances[address].assets[A_DAI] += pool_balance.dai_unclaimed_balance
                self.totals.assets[A_DAI] += pool_balance.dai_unclaimed_balance

    # Count ETH staked in Eth2 beacon chain
    self.account_for_staked_eth2_balances(addresses=self.accounts.eth, at_addition=False)
    # Finally count the balances detected in various protocols in defi balances
    self.add_defi_balances_to_token_and_totals()
def test_associated_locations(database):
    """Test that locations imported in different places are correctly stored in database

    Adds trades, exchange credentials, AMM events/swaps and balancer events
    touching many locations and checks get_associated_locations returns
    exactly the expected location set.
    """
    # Add trades from different locations
    trades = [Trade(
        timestamp=Timestamp(1595833195),
        location=Location.CRYPTOCOM,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1.0')),
        rate=Price(FVal('281.14')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1587825824),
        location=Location.CRYPTOCOM,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596014214),
        location=Location.BLOCKFI,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1565888464),
        location=Location.NEXO,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596014214),
        location=Location.NEXO,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('50.0')),
        rate=Price(FVal('3.521')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1612051199),
        location=Location.BLOCKFI,
        base_asset=symbol_to_asset_or_token('USDC'),
        quote_asset=symbol_to_asset_or_token('LTC'),
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('6404.6')),
        rate=Price(FVal('151.6283999982779809352223797')),
        fee=None,
        fee_currency=None,
        link='',
        notes='One Time',
    ), Trade(
        timestamp=Timestamp(1595833195),
        location=Location.POLONIEX,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1.0')),
        rate=Price(FVal('281.14')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596429934),
        location=Location.COINBASE,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('0.00061475')),
        rate=Price(FVal('309.0687271248474989833265555')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    ), Trade(
        timestamp=Timestamp(1596429934),
        location=Location.EXTERNAL,
        base_asset=A_ETH,
        quote_asset=A_EUR,
        trade_type=TradeType.BUY,
        amount=AssetAmount(FVal('1')),
        rate=Price(FVal('320')),
        fee=Fee(ZERO),
        fee_currency=A_USD,
        link='',
        notes='',
    )]

    # Add multiple entries for same exchange + connected exchange
    database.add_trades(trades)
    kraken_api_key1 = ApiKey('kraken_api_key')
    kraken_api_secret1 = ApiSecret(b'kraken_api_secret')
    kraken_api_key2 = ApiKey('kraken_api_key2')
    kraken_api_secret2 = ApiSecret(b'kraken_api_secret2')
    binance_api_key = ApiKey('binance_api_key')
    binance_api_secret = ApiSecret(b'binance_api_secret')

    # add mock kraken and binance
    database.add_exchange('kraken1', Location.KRAKEN, kraken_api_key1, kraken_api_secret1)
    database.add_exchange('kraken2', Location.KRAKEN, kraken_api_key2, kraken_api_secret2)
    database.add_exchange('binance', Location.BINANCE, binance_api_key, binance_api_secret)

    # Add uniswap and sushiswap events
    database.add_amm_events([
        LiquidityPoolEvent(
            tx_hash='0x47ea26957ce09e84a51b51dfdab6a4ac1c3672a372eef77b15ef7677174ac847',
            log_index=23,
            address=ChecksumEthAddress('0x3163Bb273E8D9960Ce003fD542bF26b4C529f515'),
            timestamp=Timestamp(1590011534),
            event_type=EventType.MINT_SUSHISWAP,
            pool_address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            token0=EthereumToken('0x514910771AF9Ca656af840dff83E8264EcF986CA'),
            token1=EthereumToken('0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'),
            amount0=FVal('3.313676003468974932'),
            amount1=FVal('0.064189269269768657'),
            usd_price=FVal('26.94433946158740371839009166230438'),
            lp_amount=FVal('0.460858304063739927'),
        ),
    ])
    database.add_amm_swaps([
        AMMSwap(
            tx_hash='0xa54bf4c68d435e3c8f432fd7e62b7f8aca497a831a3d3fca305a954484ddd7b2',
            log_index=208,
            address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            from_address=string_to_ethereum_address('0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F'),
            to_address=string_to_ethereum_address('0xC9cB53B48A2f3A9e75982685644c1870F1405CCb'),
            timestamp=Timestamp(1609301469),
            location=Location.UNISWAP,
            token0=EthereumToken('0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2'),
            token1=EthereumToken('0xdAC17F958D2ee523a2206206994597C13D831ec7'),
            amount0_in=AssetAmount(FVal('2.6455727132446468')),
            amount1_in=AssetAmount(ZERO),
            amount0_out=AssetAmount(ZERO),
            amount1_out=AssetAmount(FVal('1936.810111')),
        ),
    ])
    database.add_balancer_events([
        BalancerEvent(
            tx_hash='0xa54bf4c68d435e3c8f432fd7e62b7f8aca497a831a3d3fca305a954484ddd7b3',
            log_index=23,
            address=ChecksumEthAddress('0xa2107FA5B38d9bbd2C461D6EDf11B11A50F6b974'),
            timestamp=Timestamp(1609301469),
            event_type=BalancerBPTEventType.MINT,
            pool_address_token=EthereumToken('0x514910771AF9Ca656af840dff83E8264EcF986CA'),
            lp_balance=Balance(amount=FVal(2), usd_value=FVal(3)),
            amounts=[
                AssetAmount(FVal(1)),
                AssetAmount(FVal(2)),
            ],
        ),
    ])
    expected_locations = {
        Location.KRAKEN,
        Location.BINANCE,
        Location.BLOCKFI,
        Location.NEXO,
        Location.CRYPTOCOM,
        Location.POLONIEX,
        Location.COINBASE,
        Location.EXTERNAL,
        Location.SUSHISWAP,
        Location.UNISWAP,
        Location.BALANCER,
    }

    assert set(database.get_associated_locations()) == expected_locations
def query_balances(self) -> ExchangeQueryBalances:
    """Return the account balances on Bitstamp

    The balance endpoint returns a dict where the keys (str) are related
    to assets and the values (str) amounts. The keys that end with
    `_balance` contain the exact amount of an asset the account is holding
    (available amount + orders amount, per asset).

    Returns a tuple of (balances, error message). On an unsuccessful HTTP
    response the error message describes the problem.

    May raise RemoteError if the response is not valid JSON.
    """
    response = self._api_query('balance')

    if response.status_code != HTTPStatus.OK:
        result, msg = self._process_unsuccessful_response(
            response=response,
            case='balances',
        )
        return result, msg
    try:
        response_dict = rlk_jsonloads_dict(response.text)
    except JSONDecodeError as e:
        msg = f'Bitstamp returned invalid JSON response: {response.text}.'
        log.error(msg)
        raise RemoteError(msg) from e

    assets_balance: Dict[Asset, Balance] = {}
    for entry, amount in response_dict.items():
        # Only `*_balance` keys hold asset amounts; filter before converting
        # so non-balance values are never passed to FVal
        if not entry.endswith('_balance'):
            continue

        amount = FVal(amount)
        if amount == ZERO:
            continue

        symbol = entry.split('_')[0]  # If no `_`, defaults to entry
        try:
            asset = asset_from_bitstamp(symbol)
        except DeserializationError as e:
            log.error(
                'Error processing a Bitstamp balance.',
                entry=entry,
                error=str(e),
            )
            self.msg_aggregator.add_error(
                'Failed to deserialize a Bitstamp balance. '
                'Check logs for details. Ignoring it.',
            )
            continue
        except (UnknownAsset, UnsupportedAsset) as e:
            log.error(str(e))
            asset_tag = 'unknown' if isinstance(e, UnknownAsset) else 'unsupported'
            self.msg_aggregator.add_warning(
                f'Found {asset_tag} Bitstamp asset {e.asset_name}. Ignoring its balance query.',
            )
            continue
        try:
            usd_price = Inquirer().find_usd_price(asset=asset)
        except RemoteError as e:
            log.error(str(e))
            self.msg_aggregator.add_error(
                f'Error processing Bitstamp balance result due to inability to '
                f'query USD price: {str(e)}. Skipping balance entry.',
            )
            continue

        assets_balance[asset] = Balance(
            amount=amount,
            usd_value=amount * usd_price,
        )

    return assets_balance, ''
def _get_liquidation_events(
        self,
        address: ChecksumEthAddress,
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> List[CompoundEvent]:
    """Query compound liquidation events for an address from the subgraph.

    https://compound.finance/docs/ctokens#liquidate-borrow

    Each graph entry yields one CompoundEvent whose `value` is the debt the
    liquidator repaid (user's gain) and `to_value` the discounted cTokens
    lost to the liquidator. Unparseable entries are logged and skipped.
    """
    param_types, param_values = get_common_params(from_ts, to_ts, address)
    result = self.graph.query(  # type: ignore
        querystr="""liquidationEvents (where: {blockTime_lte: $end_ts, blockTime_gte: $start_ts, from: $address}) {
        id
        amount
        from
        blockNumber
        blockTime
        cTokenSymbol
        underlyingSymbol
        underlyingRepayAmount
    }}""",
        param_types=param_types,
        param_values=param_values,
    )

    events = []
    for entry in result['liquidationEvents']:
        ctoken_symbol = entry['cTokenSymbol']
        try:
            ctoken_asset = Asset(ctoken_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.')
            continue

        underlying_symbol = entry['underlyingSymbol']
        try:
            underlying_asset = Asset(underlying_symbol)
        except UnknownAsset:
            log.error(
                f'Found unexpected token symbol {underlying_symbol} during '
                f'graph query. Skipping.',
            )
            continue

        timestamp = entry['blockTime']
        # Amount/value of underlying asset paid by liquidator
        # Essentially liquidator covers part of the debt of the user
        debt_amount = FVal(entry['underlyingRepayAmount'])
        underlying_usd_price = query_usd_price_zero_if_error(
            asset=underlying_asset,
            time=timestamp,
            location='compound liquidation underlying asset',
            msg_aggregator=self.msg_aggregator,
        )
        debt_usd_value = debt_amount * underlying_usd_price
        # Amount/value of ctoken_asset lost to the liquidator
        # This is what the liquidator gains at a discount
        liquidated_amount = FVal(entry['amount'])
        liquidated_usd_price = query_usd_price_zero_if_error(
            asset=ctoken_asset,
            time=timestamp,
            location='compound liquidation ctoken asset',
            msg_aggregator=self.msg_aggregator,
        )
        liquidated_usd_value = liquidated_amount * liquidated_usd_price

        parse_result = _get_txhash_and_logidx(entry['id'])
        if parse_result is None:
            log.error(
                f'Found unprocessable liquidation id from the graph {entry["id"]}. Skipping',
            )
            continue

        gained_value = Balance(amount=debt_amount, usd_value=debt_usd_value)
        lost_value = Balance(amount=liquidated_amount, usd_value=liquidated_usd_value)
        events.append(CompoundEvent(
            event_type='liquidation',
            address=address,
            block_number=entry['blockNumber'],
            timestamp=timestamp,
            asset=underlying_asset,
            value=gained_value,
            to_asset=ctoken_asset,
            to_value=lost_value,
            realized_pnl=None,
            tx_hash=parse_result[0],
            log_index=parse_result[1],
        ))

    return events
def _calculate_interest_and_profit(
        self,
        user_address: ChecksumEthAddress,
        user_result: Dict[str, Any],
        actions: List[AaveDepositWithdrawalEvent],
        balances: AaveBalances,
        db_interest_events: Set[AaveInterestEvent],
        from_ts: Timestamp,
        to_ts: Timestamp,
) -> Tuple[List[AaveInterestEvent], Dict[Asset, Balance]]:
    """Calculate aave interest events and total earned per asset.

    Matches the user's deposit/withdrawal actions against the subgraph's
    aToken balance history: any balance jump at an action's tx that exceeds
    the action amount is interest. Also adds currently unpaid (accrued but
    not realized) interest from on-chain aToken balances.

    Returns (new interest events, total earned per asset including
    already-known DB events).
    """
    reserve_history = {}
    for reserve in user_result['reserves']:
        # reserve id is a concatenation of 3 hex addresses
        pairs = reserve['id'].split('0x')
        if len(pairs) != 4:
            log.error(
                f'Expected to find 3 addresses in graph\'s reserve history id '
                f'but the encountered id does not match: {reserve["id"]}. Skipping entry...',
            )
            continue

        try:
            address_s = '0x' + pairs[2]
            reserve_address = deserialize_ethereum_address(address_s)
        except DeserializationError:
            log.error(
                f'Failed to deserialize reserve address {address_s} '
                f'Skipping reserve address {address_s} for user address {user_address}',
            )
            continue

        atoken_history = _parse_atoken_balance_history(
            history=reserve['aTokenBalanceHistory'],
            from_ts=from_ts,
            to_ts=to_ts,
        )
        reserve_history[reserve_address] = atoken_history

    interest_events: List[AaveInterestEvent] = []
    atoken_balances: Dict[Asset, FVal] = defaultdict(FVal)
    used_history_indices = set()
    total_earned: Dict[Asset, Balance] = defaultdict(Balance)

    # Go through the existing db interest events and add total earned
    for interest_event in db_interest_events:
        total_earned[interest_event.asset] += interest_event.value

    # Create all new interest events in the query
    actions.sort(key=lambda event: event.timestamp)
    for action in actions:
        if action.event_type == 'deposit':
            atoken_balances[action.asset] += action.value.amount
        else:  # withdrawal
            atoken_balances[action.asset] -= action.value.amount

        action_reserve_address = asset_to_aave_reserve_address(action.asset)
        if action_reserve_address is None:
            log.error(
                f'Could not find aave reserve address for asset'
                f'{action.asset} in an aave graph response.'
                f' Skipping entry...',
            )
            continue
        history = reserve_history.get(action_reserve_address, None)
        if history is None:
            log.error(
                f'Could not find aTokenBalanceHistory for reserve '
                f'{action_reserve_address} in an aave graph response.'
                f' Skipping entry...',
            )
            continue
        history.sort(key=lambda event: event.timestamp)

        for idx, entry in enumerate(history):
            if idx in used_history_indices:
                continue
            used_history_indices.add(idx)

            if entry.tx_hash == action.tx_hash:
                # Any excess over the tracked balance at this tx is interest
                diff = entry.balance - atoken_balances[action.asset]
                if diff != ZERO:
                    atoken_balances[action.asset] = entry.balance
                    asset = asset_to_atoken(asset=action.asset, version=entry.version)
                    if asset is None:
                        log.error(
                            f'Could not find corresponding aToken to '
                            f'{action.asset.identifier} during an aave graph query'
                            f' Skipping entry...',
                        )
                        continue
                    timestamp = entry.timestamp
                    usd_price = query_usd_price_zero_if_error(
                        asset=asset,
                        time=timestamp,
                        location=f'aave interest event {entry.tx_hash} from graph query',
                        msg_aggregator=self.msg_aggregator,
                    )
                    earned_balance = Balance(amount=diff, usd_value=diff * usd_price)
                    interest_event = AaveInterestEvent(
                        event_type='interest',
                        asset=asset,
                        value=earned_balance,
                        block_number=0,  # can't get from graph query
                        timestamp=timestamp,
                        tx_hash=entry.tx_hash,
                        # not really the log index, but should also be unique
                        log_index=action.log_index + 1,
                    )
                    if interest_event in db_interest_events:
                        # This should not really happen since we already query
                        # historical atoken balance history in the new range
                        log.warning(
                            f'During aave subgraph query interest and profit calculation '
                            f'tried to generate interest event {interest_event} that '
                            f'already existed in the DB ',
                        )
                        continue

                    interest_events.append(interest_event)
                    total_earned[asset] += earned_balance

                # and once done break off the loop
                break

            # else this atoken history is not due to an action, so skip it.
            # It's probably due to a simple transfer
            atoken_balances[action.asset] = entry.balance
            if action.event_type == 'deposit':
                atoken_balances[action.asset] += action.value.amount
            else:  # withdrawal
                atoken_balances[action.asset] -= action.value.amount

    # Take aave unpaid interest into account
    for balance_asset, lending_balance in balances.lending.items():
        atoken = asset_to_atoken(balance_asset, version=lending_balance.version)
        if atoken is None:
            log.error(
                f'Could not find corresponding v{lending_balance.version} aToken to '
                f'{balance_asset.identifier} during an aave graph unpaid interest '
                f'query. Skipping entry...',
            )
            continue

        # v1 and v2 aTokens expose the principal via different methods
        if lending_balance.version == 1:
            method = 'principalBalanceOf'
            abi = ATOKEN_ABI
        else:
            method = 'scaledBalanceOf'
            abi = ATOKEN_V2_ABI

        principal_balance = self.ethereum.call_contract(
            contract_address=atoken.ethereum_address,
            abi=abi,
            method_name=method,
            arguments=[user_address],
        )
        unpaid_interest = lending_balance.balance.amount - (principal_balance / (FVal(10)**FVal(atoken.decimals)))  # noqa: E501
        usd_price = Inquirer().find_usd_price(atoken)
        total_earned[atoken] += Balance(
            amount=unpaid_interest,
            usd_value=unpaid_interest * usd_price,
        )

    return interest_events, total_earned
def _process_events(
        self,
        events: List[CompoundEvent],
        given_defi_balances: GIVEN_DEFI_BALANCES,
) -> Tuple[ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS]:
    """Processes all events and returns a dictionary of earned balances totals

    Returns (profit_so_far, loss_so_far, liquidation_profit, rewards_assets)
    per address/asset. Also annotates redeem/repay events in place with
    their realized pnl via namedtuple._replace.
    """
    assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    loss_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    rewards_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))

    profit_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    loss_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))
    liquidation_profit: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance))

    balances = self.get_balances(given_defi_balances)

    for idx, event in enumerate(events):
        if event.event_type == 'mint':
            # Supplying: track the deposited amount as negative until redeemed
            assets[event.address][event.asset] -= event.value
        elif event.event_type == 'redeem':
            assert event.to_asset, 'redeem events should have a to_asset'
            assert event.to_value, 'redeem events should have a to_value'
            # Positive difference vs what was deposited is realized profit
            profit_amount = (
                assets[event.address][event.to_asset].amount +
                event.to_value.amount -
                profit_so_far[event.address][event.to_asset].amount
            )
            profit: Optional[Balance]
            if profit_amount >= 0:
                usd_price = query_usd_price_zero_if_error(
                    asset=event.to_asset,
                    time=event.timestamp,
                    location='comp redeem event processing',
                    msg_aggregator=self.msg_aggregator,
                )
                profit = Balance(profit_amount, profit_amount * usd_price)
                profit_so_far[event.address][event.to_asset] += profit
            else:
                profit = None

            assets[event.address][event.to_asset] += event.to_value
            events[idx] = event._replace(realized_pnl=profit)  # TODO: maybe not named tuple?
        elif event.event_type == 'borrow':
            loss_assets[event.address][event.asset] -= event.value
        elif event.event_type == 'repay':
            # Paying back more than was borrowed (interest) is realized loss
            loss_amount = (
                loss_assets[event.address][event.asset].amount +
                event.value.amount -
                loss_so_far[event.address][event.asset].amount
            )
            loss: Optional[Balance]
            if loss_amount >= 0:
                usd_price = query_usd_price_zero_if_error(
                    asset=event.asset,
                    time=event.timestamp,
                    location='comp repay event processing',
                    msg_aggregator=self.msg_aggregator,
                )
                loss = Balance(loss_amount, loss_amount * usd_price)
                loss_so_far[event.address][event.asset] += loss
            else:
                loss = None

            loss_assets[event.address][event.asset] += event.value
            events[idx] = event._replace(realized_pnl=loss)  # TODO: maybe not named tuple?
        elif event.event_type == 'liquidation':
            assert event.to_asset, 'liquidation events should have a to_asset'
            # Liquidator covers part of the borrowed amount
            loss_assets[event.address][event.asset] += event.value
            liquidation_profit[event.address][event.asset] += event.value
            # Liquidator receives discounted to_asset
            loss_assets[event.address][event.to_asset] += event.to_value
            loss_so_far[event.address][event.to_asset] += event.to_value
        elif event.event_type == 'comp':
            rewards_assets[event.address][A_COMP] += event.value

    # Fold current balances into the running totals
    for address, bentry in balances.items():
        for asset, entry in bentry['lending'].items():
            profit_amount = (
                profit_so_far[address][asset].amount +
                entry.balance.amount +
                assets[address][asset].amount
            )
            if profit_amount < 0:
                log.error(
                    f'In compound we calculated negative profit. Should not happen. '
                    f'address: {address} asset: {asset} ',
                )
            else:
                usd_price = Inquirer().find_usd_price(Asset(asset))
                profit_so_far[address][asset] = Balance(
                    amount=profit_amount,
                    usd_value=profit_amount * usd_price,
                )

        for asset, entry in bentry['borrowing'].items():
            remaining = entry.balance + loss_assets[address][asset]
            if remaining.amount < ZERO:
                continue
            loss_so_far[address][asset] += remaining
            if loss_so_far[address][asset].usd_value < ZERO:
                # Re-price with current USD price to avoid a negative usd value
                amount = loss_so_far[address][asset].amount
                loss_so_far[address][asset] = Balance(
                    amount=amount,
                    usd_value=amount * Inquirer().find_usd_price(Asset(asset)),
                )

        for asset, entry in bentry['rewards'].items():
            rewards_assets[address][asset] += entry.balance

    return profit_so_far, loss_so_far, liquidation_profit, rewards_assets
def _query_ethereum_tokens(
        self,
        action: AccountAction,
        given_accounts: Optional[List[ChecksumEthAddress]] = None,
        force_detection: bool = False,
) -> None:
    """Queries ethereum token balance via either etherscan or ethereum node

    By default queries all accounts but can also be given a specific list of
    accounts to query.

    Should come here during addition of a new account or querying of all token
    balances.

    May raise:
    - RemoteError if an external service such as Etherscan or cryptocompare
    is queried and there is a problem with its query.
    - EthSyncError if querying the token balances through a provided ethereum
    client and the chain is not synced
    """
    if given_accounts is None:
        accounts = self.accounts.eth
    else:
        accounts = given_accounts

    ethtokens = EthTokens(database=self.database, ethereum=self.ethereum)
    try:
        balance_result, token_usd_price = ethtokens.query_tokens_for_addresses(
            addresses=accounts,
            force_detection=force_detection,
        )
    except BadFunctionCallOutput as e:
        log.error(
            'Assuming unsynced chain. Got web3 BadFunctionCallOutput '
            'exception: {}'.format(str(e)),
        )
        # Explicitly chain the web3 error so the root cause is preserved
        # in the traceback (was previously raised without `from e`)
        raise EthSyncError(
            'Tried to use the ethereum chain of the provided client to query '
            'token balances but the chain is not synced.',
        ) from e

    # Update the per account token balance and usd value
    token_totals: Dict[EthereumToken, FVal] = defaultdict(FVal)
    eth_balances = self.balances.eth
    for account, token_balances in balance_result.items():
        for token, token_balance in token_balances.items():
            if token_usd_price[token] == ZERO:
                # skip tokens that have no price
                continue

            token_totals[token] += token_balance
            eth_balances[account].assets[token] = Balance(
                amount=token_balance,
                usd_value=token_balance * token_usd_price[token],
            )

    # Update the totals. On QUERY the totals are overwritten, on any other
    # action (addition of an account) the new balances are added on top.
    for token, token_total_balance in token_totals.items():
        balance = Balance(
            amount=token_total_balance,
            usd_value=token_total_balance * token_usd_price[token],
        )
        if action == AccountAction.QUERY:
            self.totals.assets[token] = balance
        else:  # addition
            self.totals.assets[token] += balance
def deserialize_pool_share(
        raw_pool_share: Dict[str, Any],
) -> Tuple[ChecksumEthAddress, BalancerPoolBalance]:
    """Deserialize a raw subgraph pool share entry into the owner's address
    and a BalancerPoolBalance.

    May raise DeserializationError
    """
    try:
        address_of_user = raw_pool_share['userAddress']['id']
        user_amount = deserialize_asset_amount(raw_pool_share['balance'])
        pool_data = raw_pool_share['poolId']
        total_amount = deserialize_asset_amount(pool_data['totalShares'])
        address_of_pool = pool_data['id']
        tokens_data = pool_data['tokens']
        total_weight = deserialize_asset_amount(pool_data['totalWeight'])
    except KeyError as e:
        raise DeserializationError(f'Missing key: {str(e)}.') from e

    if total_weight == ZERO:
        raise DeserializationError('Pool weight is zero.')

    user_address = deserialize_ethereum_address(address_of_user)
    pool_address = deserialize_ethereum_address(address_of_pool)

    pool_tokens = []
    for token_data in tokens_data:
        try:
            raw_token_address = token_data['address']
            token_symbol = token_data['symbol']
            token_name = token_data['name']
            token_decimals = token_data['decimals']
            token_total_amount = deserialize_asset_amount(token_data['balance'])
            token_weight = deserialize_asset_amount(token_data['denormWeight'])
        except KeyError as e:
            raise DeserializationError(f'Missing key: {str(e)}.') from e

        token = get_ethereum_token(
            symbol=token_symbol,
            ethereum_address=deserialize_ethereum_address(raw_token_address),
            name=token_name,
            decimals=token_decimals,
        )
        if token_total_amount == ZERO:
            raise DeserializationError(f'Token {token.identifier} balance is zero.')

        # User's share of this token is proportional to their share of the pool
        pool_tokens.append(BalancerPoolTokenBalance(
            token=token,
            total_amount=token_total_amount,
            user_balance=Balance(amount=user_amount / total_amount * token_total_amount),
            weight=token_weight * 100 / total_weight,
        ))

    # Sort tokens deterministically by their on-chain address
    pool_tokens.sort(key=lambda entry: entry.token.ethereum_address)
    return user_address, BalancerPoolBalance(
        address=pool_address,
        tokens=pool_tokens,
        total_amount=total_amount,
        user_balance=Balance(amount=user_amount),
    )
def query_balances(self) -> ExchangeQueryBalances: try: resp = self.api_query_dict('returnCompleteBalances', {"account": "all"}) except RemoteError as e: msg = ('Poloniex API request failed. Could not reach poloniex due ' 'to {}'.format(e)) log.error(msg) return None, msg assets_balance: Dict[Asset, Balance] = {} for poloniex_asset, v in resp.items(): try: available = deserialize_asset_amount(v['available']) on_orders = deserialize_asset_amount(v['onOrders']) except DeserializationError as e: self.msg_aggregator.add_error( f'Could not deserialize amount from poloniex due to ' f'{str(e)}. Ignoring its balance query.', ) continue if available != ZERO or on_orders != ZERO: try: asset = asset_from_poloniex(poloniex_asset) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported poloniex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown poloniex asset {e.asset_name}. ' f' Ignoring its balance query.', ) continue except DeserializationError: log.error( f'Unexpected poloniex asset type. Expected string ' f' but got {type(poloniex_asset)}', ) self.msg_aggregator.add_error( 'Found poloniex asset entry with non-string type. ' ' Ignoring its balance query.', ) continue if asset == A_LEND: # poloniex mistakenly returns LEND balances continue # https://github.com/rotki/rotki/issues/2530 try: usd_price = Inquirer().find_usd_price(asset=asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing poloniex balance entry due to inability to ' f'query USD price: {str(e)}. Skipping balance entry', ) continue amount = available + on_orders usd_value = amount * usd_price assets_balance[asset] = Balance( amount=amount, usd_value=usd_value, ) log.debug( 'Poloniex balance query', currency=asset, amount=amount, usd_value=usd_value, ) return assets_balance, ''
def test_get_eth2_staked_amount_fetch_from_db(  # pylint: disable=unused-argument
        ethereum_manager,
        call_order,
        ethereum_manager_connect_at_start,
        inquirer,
        price_historian,
        freezer,
):
    """
    Test new on-chain requests for existing addresses requires a difference of
    REQUEST_DELTA_TS since last used query range `end_ts`.
    """
    # Freeze time at the first expected deposit's timestamp so ts_now is deterministic
    freezer.move_to(datetime.fromtimestamp(EXPECTED_DEPOSITS[0].timestamp))
    ts_now = int(datetime.now().timestamp())  # 1604506685
    # Mocked DB. The side_effect lists below are consumed one entry per call to
    # get_eth2_staked_amount, so their order matches the three calls made further down.
    database = MagicMock()
    database.get_used_query_range.side_effect = [
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
        (Timestamp(ts_now - (2 * REQUEST_DELTA_TS)), Timestamp(ts_now)),
    ]
    database.get_eth2_deposits.side_effect = [
        [],  # no on-chain request, nothing in DB
        [],  # no on-chain request, nothing in DB
        [EXPECTED_DEPOSITS[0]],  # on-chain request, deposit in DB
    ]
    # NOTE(review): usd_value 48 presumably comes from the mocked price fixtures
    # (32 ETH at a mocked price) — confirm against the inquirer fixture
    expected_balance = {ADDR1: Balance(amount=FVal(32), usd_value=FVal(48))}
    with patch(
        'rotkehlchen.chain.ethereum.eth2._get_eth2_staked_amount_onchain',
    ) as mock_get_eth2_staked_amount_onchain:
        # 3rd call return
        mock_get_eth2_staked_amount_onchain.return_value = [
            EXPECTED_DEPOSITS[0]
        ]
        wait_until_all_nodes_connected(
            ethereum_manager_connect_at_start=ethereum_manager_connect_at_start,
            ethereum=ethereum_manager,
        )
        message_aggregator = MessagesAggregator()

        # First call: still within the last query range end -> no on-chain query
        deposit_results_onchain = get_eth2_staked_amount(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain.deposits == []
        assert deposit_results_onchain.totals == {}
        mock_get_eth2_staked_amount_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS - 1s
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS - 1))

        # Second call: one second short of the delta -> still no on-chain query
        deposit_results_onchain = get_eth2_staked_amount(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain.deposits == []
        assert deposit_results_onchain.totals == {}
        mock_get_eth2_staked_amount_onchain.assert_not_called()

        # NB: Move time to ts_now + REQUEST_DELTA_TS (triggers request)
        freezer.move_to(datetime.fromtimestamp(ts_now + REQUEST_DELTA_TS))

        # Third call: delta reached -> on-chain query happens and the DB deposit is returned
        deposit_results_onchain = get_eth2_staked_amount(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            database=database,
        )
        assert deposit_results_onchain.deposits == [EXPECTED_DEPOSITS[0]]
        assert deposit_results_onchain.totals == expected_balance
        mock_get_eth2_staked_amount_onchain.assert_called_with(
            ethereum=ethereum_manager,
            addresses=[ADDR1],
            has_premium=True,
            msg_aggregator=message_aggregator,
            from_ts=Timestamp(ts_now),
            to_ts=Timestamp(ts_now + REQUEST_DELTA_TS),
        )
assert FVal(vault['vault_value']['usd_value']) > ZERO assert FVal(vault['underlying_value']['amount']) > ZERO assert FVal(vault['underlying_value']['usd_value']) > ZERO # Expected events as of writing of the test. USD values are all mocked. EXPECTED_HISTORY = { 'YALINK Vault': YearnVaultHistory( events=[ YearnVaultEvent( event_type='deposit', block_number=10693331, timestamp=Timestamp(1597877037), from_asset=EthereumToken('aLINK'), from_value=Balance(amount=FVal('389.42925099069838547'), usd_value=ONE), to_asset=EthereumToken('yaLINK'), to_value=Balance(amount=FVal('378.670298739289527979'), usd_value=ONE), realized_pnl=None, tx_hash= '0x14bbb454cfe3bfbef4e7ea2b03e7aac022048480b3d2f81ea8d191f0543848c4', log_index=102, ), YearnVaultEvent( event_type='deposit', block_number=10843734, timestamp=Timestamp(1599868806), from_asset=EthereumToken('aLINK'), from_value=Balance(amount=FVal('72.192501610488361536'), usd_value=ONE),
def query_balances(
        self) -> Tuple[Optional[Dict[Asset, Dict[str, FVal]]], str]:
    """Return the account exchange balances on Bitfinex

    The wallets endpoint returns a list where each item is a
    currency wallet. Each currency wallet has type (i.e. exchange, margin,
    funding), currency, balance, etc. Currencies (tickers) are in Bitfinex
    format and must be standardized.

    Endpoint documentation:
    https://docs.bitfinex.com/reference#rest-auth-wallets

    May raise:
    - RemoteError if the response is not valid JSON
    """
    self.first_connection()
    response = self._api_query('wallets')
    if response.status_code != HTTPStatus.OK:
        result, msg = self._process_unsuccessful_response(
            response=response,
            case='balances',
        )
        return result, msg
    try:
        response_list = rlk_jsonloads_list(response.text)
    except JSONDecodeError as e:
        msg = f'{self.name} returned invalid JSON response: {response.text}.'
        log.error(msg)
        raise RemoteError(msg) from e

    # Wallet items indices
    currency_index = 1
    balance_index = 2
    asset_balance: DefaultDict[Asset, Balance] = defaultdict(Balance)
    for wallet in response_list:
        # Bug fix: previously a single condition lumped short wallet entries
        # together with zero/negative balances, flagging ordinary empty
        # wallets as deserialization failures. Only a malformed (too short)
        # entry is an error; empty wallets are skipped silently.
        if len(wallet) < API_WALLET_MIN_RESULT_LENGTH:
            log.error(
                f'Error processing a {self.name} balance result. '
                f'Found less items than expected',
                wallet=wallet,
            )
            self.msg_aggregator.add_error(
                f'Failed to deserialize a {self.name} balance result. '
                f'Check logs for details. Ignoring it.',
            )
            continue

        if wallet[balance_index] <= 0:
            continue  # nothing held in this wallet

        try:
            asset = asset_from_bitfinex(
                bitfinex_name=wallet[currency_index],
                currency_map=self.currency_map,
            )
        except (UnknownAsset, UnsupportedAsset) as e:
            asset_tag = 'unknown' if isinstance(e, UnknownAsset) else 'unsupported'
            self.msg_aggregator.add_warning(
                f'Found {asset_tag} {self.name} asset {e.asset_name} due to: {str(e)}. '
                f'Ignoring its balance query.',
            )
            continue

        try:
            usd_price = Inquirer().find_usd_price(asset=asset)
        except RemoteError as e:
            self.msg_aggregator.add_error(
                f'Error processing {self.name} balance result due to inability to '
                f'query USD price: {str(e)}. Skipping balance result.',
            )
            continue

        amount = FVal(wallet[balance_index])
        asset_balance[asset] += Balance(
            amount=amount,
            usd_value=amount * usd_price,
        )

    return {a: b.to_dict() for a, b in asset_balance.items()}, ''
def get_events_for_atoken_and_address(
        self,
        user_address: ChecksumEthAddress,
        atoken: EthereumToken,
        deposit_events: List[Dict[str, Any]],
        withdraw_events: List[Dict[str, Any]],
        from_block: int,
        to_block: int,
) -> List[AaveSimpleEvent]:
    """Build the list of aave deposit/interest/withdrawal events for one
    aToken and user address.

    aToken mint Transfers (from the zero address to the user) that match a
    deposit event are consumed as part of that deposit; the leftover mints
    are reported as 'interest' events.

    This function should be entered while holding the history_lock semaphore
    """
    # aToken Transfer logs from the zero address to the user are mints
    argument_filters = {
        'from': ZERO_ADDRESS,
        'to': user_address,
    }
    mint_events = self.ethereum.get_logs(
        contract_address=atoken.ethereum_address,
        abi=ATOKEN_ABI,
        event_name='Transfer',
        argument_filters=argument_filters,
        from_block=from_block,
        to_block=to_block,
    )
    # mint_data entries are (block_number, amount, timestamp, tx_hash) tuples
    mint_data = set()
    mint_data_to_log_index = {}
    for event in mint_events:
        amount = hexstr_to_int(event['data'])
        if amount == 0:
            continue  # first mint can be for 0. Ignore
        entry = (
            event['blockNumber'],
            amount,
            self.ethereum.get_event_timestamp(event),
            event['transactionHash'],
        )
        mint_data.add(entry)
        mint_data_to_log_index[entry] = event['logIndex']

    reserve_asset = _atoken_to_reserve_asset(atoken)
    reserve_address, decimals = _get_reserve_address_decimals(
        reserve_asset.identifier)
    aave_events = []
    for event in deposit_events:
        # topic[1] identifies the reserve; only process this aToken's reserve
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            deposit = hexstr_to_int(event['data'][:66])
            block_number = event['blockNumber']
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            log_index = event['logIndex']
            # If there is a corresponding deposit event remove the minting event data
            # so the mint is not later double counted as interest
            entry = (block_number, deposit, timestamp, tx_hash)
            if entry in mint_data:
                mint_data.remove(entry)
                del mint_data_to_log_index[entry]

            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave deposit',
                msg_aggregator=self.msg_aggregator,
            )
            # Scale the raw integer amount by the reserve's decimals
            deposit_amount = deposit / (FVal(10)**FVal(decimals))
            aave_events.append(
                AaveSimpleEvent(
                    event_type='deposit',
                    asset=reserve_asset,
                    value=Balance(
                        amount=deposit_amount,
                        usd_value=deposit_amount * usd_price,
                    ),
                    block_number=block_number,
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                    log_index=log_index,
                ))

    # Remaining mints had no matching deposit: treat them as earned interest
    # NOTE(review): assumes reserve and aToken share the same decimals — confirm
    for data in mint_data:
        usd_price = query_usd_price_zero_if_error(
            asset=atoken,
            time=data[2],
            location='aave interest profit',
            msg_aggregator=self.msg_aggregator,
        )
        interest_amount = data[1] / (FVal(10)**FVal(decimals))
        aave_events.append(
            AaveSimpleEvent(
                event_type='interest',
                asset=atoken,
                value=Balance(
                    amount=interest_amount,
                    usd_value=interest_amount * usd_price,
                ),
                block_number=data[0],
                timestamp=data[2],
                tx_hash=data[3],
                log_index=mint_data_to_log_index[data],
            ))

    for event in withdraw_events:
        # Same reserve filter as for deposits
        if hex_or_bytes_to_address(event['topics'][1]) == reserve_address:
            # first 32 bytes of the data are the amount
            withdrawal = hexstr_to_int(event['data'][:66])
            block_number = event['blockNumber']
            timestamp = self.ethereum.get_event_timestamp(event)
            tx_hash = event['transactionHash']
            usd_price = query_usd_price_zero_if_error(
                asset=reserve_asset,
                time=timestamp,
                location='aave withdrawal',
                msg_aggregator=self.msg_aggregator,
            )
            withdrawal_amount = withdrawal / (FVal(10)**FVal(decimals))
            aave_events.append(
                AaveSimpleEvent(
                    event_type='withdrawal',
                    asset=reserve_asset,
                    value=Balance(
                        amount=withdrawal_amount,
                        usd_value=withdrawal_amount * usd_price,
                    ),
                    block_number=block_number,
                    timestamp=timestamp,
                    tx_hash=tx_hash,
                    log_index=event['logIndex'],
                ))

    return aave_events
def test_query_vaults_usdc(rotkehlchen_api_server, ethereum_accounts):
    """Check vault info and details for a vault with USDC as collateral"""
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    # Map the test account to the DSProxy owning vault 7588
    proxies_mapping = {
        ethereum_accounts[0]: '0xBE79958661741079679aFf75DbEd713cE71a979d',  # 7588
    }
    mock_proxies(rotki, proxies_mapping, 'makerdao_vaults')
    # Query the vaults list endpoint
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultsresource",
        ))
    vaults = assert_proper_response_with_result(response)
    # Expected vault state: fully repaid/withdrawn, so zero collateral and debt
    vault_7588 = MakerdaoVault(
        identifier=7588,
        owner=ethereum_accounts[0],
        collateral_type='USDC-A',
        urn='0x56D88244073B2fC17af5B1E6088936D5bAaDc37B',
        collateral_asset=A_USDC,
        collateral=Balance(ZERO, ZERO),
        debt=Balance(ZERO, ZERO),
        collateralization_ratio=None,
        liquidation_ratio=FVal('1.03'),
        liquidation_price=None,
        stability_fee=FVal('0.04'),
    )
    expected_vaults = [vault_7588.serialize()]
    # stability_fee/liquidation_ratio can change on chain over time, so ignore them
    assert_serialized_lists_equal(
        expected_vaults,
        vaults,
        ignore_keys=['stability_fee', 'liquidation_ratio'],
    )
    # Query the vault details endpoint
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "makerdaovaultdetailsresource",
        ))
    # Full expected event history of the vault: deposit -> generate x2 -> payback -> withdraw
    vault_7588_details = {
        'identifier': 7588,
        'collateral_asset': A_USDC.identifier,
        'creation_ts': 1585286480,
        'total_interest_owed': '0.00050636718',
        'total_liquidated': {
            'amount': '0',
            'usd_value': '0',
        },
        'events': [{
            'event_type': 'deposit',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585286480,
            'tx_hash':
            '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '20',
                'usd_value': '20.46',
            },
            'timestamp': 1585286480,
            'tx_hash':
            '0x8b553dd0e8ee5385ec91105bf911143666d9df0ecd84c04f288278f7658aa7d6',
        }, {
            'event_type': 'generate',
            'value': {
                'amount': '15.99',
                'usd_value': '16.35777',
            },
            'timestamp': 1585286769,
            'tx_hash':
            '0xdb861c893a51e4649ff3740cd3658cd4c9b1d048d3b8b4d117f4319bd60aee01',
        }, {
            'event_type': 'payback',
            'value': {
                'amount': '35.990506367',
                'usd_value': '36.818288',
            },
            'timestamp': 1585290263,
            'tx_hash':
            '0xdd7825fe4a93c6f1ffa25a91b6da2396c229fe16b17242ad5c0bf7962928b2ec',
        }, {
            'event_type': 'withdraw',
            'value': {
                'amount': '45',
                'usd_value': '45',
            },
            'timestamp': 1585290300,
            'tx_hash':
            '0x97462ebba7ce2467787bf6de25a25c24e538cf8a647919112c5f048b6a293408',
        }],
    }
    details = assert_proper_response_with_result(response)
    expected_details = [vault_7588_details]
    assert_serialized_lists_equal(expected_details, details, ignore_keys=['liquidation_ratio'])
def _query_lending_balances( self, balances: DefaultDict[Asset, Balance], ) -> DefaultDict[Asset, Balance]: """Queries binance lending balances and if any found adds them to `balances` May raise: - RemoteError """ data = self.api_query_dict('sapi', 'lending/union/account') positions = data.get('positionAmountVos', None) if positions is None: raise RemoteError( f'Could not find key positionAmountVos in lending account data ' f'{data} returned by {self.name}.', ) for entry in positions: try: amount = deserialize_asset_amount(entry['amount']) if amount == ZERO: continue asset = asset_from_binance(entry['asset']) except UnsupportedAsset as e: self.msg_aggregator.add_warning( f'Found unsupported {self.name} asset {e.asset_name}. ' f'Ignoring its lending balance query.', ) continue except UnknownAsset as e: self.msg_aggregator.add_warning( f'Found unknown {self.name} asset {e.asset_name}. ' f'Ignoring its lending balance query.', ) continue except (DeserializationError, KeyError) as e: msg = str(e) if isinstance(e, KeyError): msg = f'Missing key entry for {msg}.' self.msg_aggregator.add_error( f'Error at deserializing {self.name} asset. {msg}. ' f'Ignoring its lending balance query.', ) continue try: usd_price = Inquirer().find_usd_price(asset) except RemoteError as e: self.msg_aggregator.add_error( f'Error processing {self.name} balance entry due to inability to ' f'query USD price: {str(e)}. Skipping balance entry', ) continue balances[asset] += Balance( amount=amount, usd_value=amount * usd_price, ) return balances