def get_basic_contract_info(address: ChecksumEthAddress) -> Dict[str, Any]:
    """
    Query a contract address in the pangolin graph node and return basic
    information such as:
    - decimals
    - name
    - symbol
    if it is provided in the contract.

    This method may raise:
    - BadFunctionCallOutput: If there is an error calling a bad address
    """
    properties = ('decimals', 'symbol', 'name')
    info: Dict[str, Any] = {}
    try:
        # Output contains call status and result
        graph = Graph('https://api.thegraph.com/subgraphs/name/dasconnor/pangolin-dex')
        output = graph.query(
            f'''{{token(id:"{address.lower()}"){{
                symbol
                name
                decimals
                }}
            }}
            ''',
        )
        token = output['token']
        for prop in properties:
            if prop == 'decimals':
                info[prop] = int(token[prop])
            else:
                info[prop] = token[prop]
    except (KeyError, ValueError, TypeError):
        # If anything goes wrong with the query or its output, return an
        # entry for each property so the result has a consistent shape
        return {'decimals': None, 'symbol': None, 'name': None}
    return info
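# A minimal usage sketch for the helper above, assuming an already checksummed
# address; the token address below is illustrative only. A failed or malformed
# subgraph response surfaces as None values, so callers should check before
# trusting the fields.
info = get_basic_contract_info('0x60781C2586D68229fde47564546784ab3fACA982')
if info['decimals'] is None:
    print('could not query token info from the pangolin subgraph')
else:
    print(f"{info['name']} ({info['symbol']}), {info['decimals']} decimals")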
def __init__(
        self,
        ethrpc_endpoint: str,
        etherscan: Etherscan,
        database: DBHandler,
        msg_aggregator: MessagesAggregator,
        greenlet_manager: GreenletManager,
        connect_at_start: Sequence[NodeName],
        eth_rpc_timeout: int = DEFAULT_ETH_RPC_TIMEOUT,
) -> None:
    log.debug(f'Initializing Ethereum Manager with own rpc endpoint: {ethrpc_endpoint}')
    self.greenlet_manager = greenlet_manager
    self.web3_mapping: Dict[NodeName, Web3] = {}
    self.own_rpc_endpoint = ethrpc_endpoint
    self.etherscan = etherscan
    self.msg_aggregator = msg_aggregator
    self.eth_rpc_timeout = eth_rpc_timeout
    self.transactions = EthTransactions(
        database=database,
        etherscan=etherscan,
        msg_aggregator=msg_aggregator,
    )
    for node in connect_at_start:
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=f'Attempt connection to {str(node)} ethereum node',
            exception_is_error=True,
            method=self.attempt_connect,
            name=node,
            ethrpc_endpoint=node.endpoint(self.own_rpc_endpoint),
            mainnet_check=True,
        )
    self.blocks_subgraph = Graph(
        'https://api.thegraph.com/subgraphs/name/blocklytics/ethereum-blocks',
    )
def test_exception_retries():
    """Test that an exception raised by Client.execute() triggers the retry logic"""
    graph = Graph(TEST_URL_1)
    param_types = {'$limit': 'Int!'}
    param_values = {'limit': 1}
    querystr = format_query_indentation(TEST_QUERY_1.format())

    client = MagicMock()
    client.execute.side_effect = Exception('any message')

    backoff_factor_patch = patch(
        'rotkehlchen.chain.ethereum.graph.RETRY_BACKOFF_FACTOR',
        new=0,
    )
    client_patch = patch.object(graph, 'client', new=client)

    with ExitStack() as stack:
        stack.enter_context(backoff_factor_patch)
        stack.enter_context(client_patch)
        with pytest.raises(RemoteError) as e:
            graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )

    assert client.execute.call_count == QUERY_RETRY_TIMES
    assert 'No retries left' in str(e.value)
def test_success_result():
    """Test that a successful response returns the result as expected and does
    not trigger the retry logic.
    """
    expected_result = {"schema": [{"data1"}, {"data2"}]}

    graph = Graph(TEST_URL_1)
    param_types = {'$limit': 'Int!'}
    param_values = {'limit': 1}
    querystr = format_query_indentation(TEST_QUERY_1.format())

    client = MagicMock()
    client.execute.return_value = expected_result

    backoff_factor_patch = patch(
        'rotkehlchen.chain.ethereum.graph.RETRY_BACKOFF_FACTOR',
        new=0,  # patch the module constant; return_value would leave it a MagicMock
    )
    client_patch = patch.object(graph, 'client', new=client)

    with ExitStack() as stack:
        stack.enter_context(backoff_factor_patch)
        stack.enter_context(client_patch)
        result = graph.query(
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )

    assert client.execute.call_count == 1
    assert result == expected_result
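# A minimal sketch of the retry behaviour the two tests above exercise, under
# the assumption that Graph.query retries up to QUERY_RETRY_TIMES times with
# an exponentially growing RETRY_BACKOFF_FACTOR sleep and then raises
# RemoteError. The names mirror the constants patched in the tests; the real
# implementation lives in rotkehlchen.chain.ethereum.graph and may differ in
# detail.
import time

def query_with_retries(client, querystr):
    for attempt in range(QUERY_RETRY_TIMES):
        try:
            return client.execute(querystr)
        except Exception:
            if attempt == QUERY_RETRY_TIMES - 1:
                raise RemoteError(f'No retries left after {QUERY_RETRY_TIMES} tries')
            # sleeps 0 seconds when RETRY_BACKOFF_FACTOR is patched to 0
            time.sleep(RETRY_BACKOFF_FACTOR * (2 ** attempt))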
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    try:
        self.graph = Graph(
            'https://api.thegraph.com/subgraphs/name/benesjan/uniswap-v2',
        )
        self.graph_v3 = Graph(
            'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3',
        )
    except RemoteError as e:
        # use the argument directly: self.msg_aggregator is only set by
        # super().__init__() below, so it would not exist yet here
        msg_aggregator.add_error(
            SUBGRAPH_REMOTE_ERROR_MSG.format(
                error_msg=str(e),
                location=Location.UNISWAP,
            ),
        )
        raise ModuleInitializationFailure('Uniswap subgraph remote error') from e

    super().__init__(
        location=Location.UNISWAP,
        ethereum_manager=ethereum_manager,
        database=database,
        premium=premium,
        msg_aggregator=msg_aggregator,
        graph=self.graph,
    )
def __init__(
        self,
        ethrpc_endpoint: str,
        etherscan: Etherscan,
        msg_aggregator: MessagesAggregator,
        greenlet_manager: GreenletManager,
        connect_at_start: Sequence[NodeName],
        eth_rpc_timeout: int = DEFAULT_EVM_RPC_TIMEOUT,
) -> None:
    log.debug(f'Initializing Ethereum Manager with own rpc endpoint: {ethrpc_endpoint}')
    self.greenlet_manager = greenlet_manager
    self.web3_mapping: Dict[NodeName, Web3] = {}
    self.own_rpc_endpoint = ethrpc_endpoint
    self.etherscan = etherscan
    self.msg_aggregator = msg_aggregator
    self.eth_rpc_timeout = eth_rpc_timeout
    self.archive_connection = False
    self.queried_archive_connection = False
    for node in connect_at_start:
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=f'Attempt connection to {str(node)} ethereum node',
            exception_is_error=True,
            method=self.attempt_connect,
            name=node,
            ethrpc_endpoint=node.endpoint(self.own_rpc_endpoint),
            mainnet_check=True,
        )
    self.blocks_subgraph = Graph(
        'https://api.thegraph.com/subgraphs/name/blocklytics/ethereum-blocks',
    )
    # A cache for the erc20 contract info to not requery the same one
    self.contract_info_cache: Dict[ChecksumEthAddress, Dict[str, Any]] = {}
def pairs_and_token_details_from_graph() -> List[Dict[str, Any]]:
    """Detect the uniswap v2 pool tokens by using the subgraph"""
    step = 1000
    querystr = """
    pairs(first:$first, skip: $skip) {
        id
        token0{
            id
            symbol
            name
            decimals
        }
        token1{
            id
            symbol
            name
            decimals
        }
    }}
    """
    param_types = {'$first': 'Int!', '$skip': 'Int!'}
    param_values = {'first': step, 'skip': 0}
    graph = Graph('https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2')

    contracts = []
    total_pairs_num = 0
    while True:
        print(f'Querying graph pairs batch {param_values["skip"]} - {param_values["skip"] + step}')
        result = graph.query(querystr, param_types=param_types, param_values=param_values)
        for entry in result['pairs']:
            contracts.append({
                'address': to_checksum_address(entry['id']),
                'token0': {
                    'address': to_checksum_address(entry['token0']['id']),
                    'name': entry['token0']['name'],
                    'symbol': entry['token0']['symbol'],
                    'decimals': int(entry['token0']['decimals']),
                },
                'token1': {
                    'address': to_checksum_address(entry['token1']['id']),
                    'name': entry['token1']['name'],
                    'symbol': entry['token1']['symbol'],
                    'decimals': int(entry['token1']['decimals']),
                },
            })

        pairs_num = len(result['pairs'])
        total_pairs_num += pairs_num
        if pairs_num < step:
            break

        param_values['skip'] = total_pairs_num

    return contracts
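# Hypothetical driver for the detection helper above: dump the pairs to a JSON
# file. The filename and the __main__ wrapper are illustrative, not part of
# the rotki codebase.
import json

if __name__ == '__main__':
    pairs = pairs_and_token_details_from_graph()
    print(f'Found {len(pairs)} uniswap v2 pairs')
    with open('uniswap_v2_lp_tokens.json', 'w') as f:
        json.dump(pairs, f, indent=2)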
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        msg_aggregator: MessagesAggregator,
) -> None:
    """May raise RemoteError if we can't connect to the subgraph"""
    super().__init__(
        ethereum_manager=ethereum_manager,
        database=database,
        msg_aggregator=msg_aggregator,
    )
    self.graph = Graph('https://api.thegraph.com/subgraphs/name/aave/protocol-raw')
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    self.ethereum = ethereum_manager
    self.database = database
    self.msg_aggregator = msg_aggregator
    self.premium = premium
    self.graph = Graph('https://api.thegraph.com/subgraphs/name/salazarguille/yearn-vaults-v2-subgraph-mainnet')  # noqa: E501
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: DBHandler,
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
):
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    try:
        self.graph: Optional[Graph] = Graph(
            'https://api.thegraph.com/subgraphs/name/graphprotocol/compound-v2',
        )
    except RemoteError as e:
        self.graph = None
        self.msg_aggregator.add_error(
            f'Could not initialize the Compound subgraph due to {str(e)}. '
            f'All compound historical queries are not functioning until this is fixed. '
            f'Probably will get fixed with time. If not, report it to Rotki\'s support channel',
        )
    self.comptroller_address = to_checksum_address(COMPTROLLER_PROXY.call(
        ethereum=self.ethereum,
        method_name='comptrollerImplementation',
    ))
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
        data_directory: Path,
) -> None:
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    self.data_directory = data_directory
    self.trades_lock = Semaphore()
    try:
        self.graph: Optional[Graph] = Graph(
            'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2',
        )
    except RemoteError as e:
        self.graph = None
        self.msg_aggregator.add_error(
            f'Could not initialize the Uniswap subgraph due to {str(e)}. '
            f'All uniswap historical queries are not functioning until this is fixed. '
            f'Probably will get fixed with time. If not, report it to Rotki\'s support channel',
        )
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: DBHandler,
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
):
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    self.graph = Graph('https://api.thegraph.com/subgraphs/name/graphprotocol/compound-v2')
    self.comptroller_address = to_checksum_address(self.ethereum.call_contract(
        contract_address=COMPTROLLER_PROXY.address,
        abi=COMPTROLLER_PROXY.abi,
        method_name='comptrollerImplementation',
    ))
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    self.data_directory = database.user_data_dir.parent
    self.trades_lock = Semaphore()
    try:
        self.graph = Graph(
            'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2',
        )
    except RemoteError as e:
        self.msg_aggregator.add_error(SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e)))
        raise ModuleInitializationFailure('subgraph remote error') from e
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    super().__init__(
        ethereum_manager=ethereum_manager,
        database=database,
        premium=premium,
        msg_aggregator=msg_aggregator,
    )
    self.history_lock = Semaphore()
    try:
        self.graph = Graph(
            'https://api.thegraph.com/subgraphs/name/liquity/liquity',
        )
    except RemoteError as e:
        self.msg_aggregator.add_error(
            SUBGRAPH_REMOTE_ERROR_MSG.format(protocol='Liquity', error_msg=str(e)),
        )
        raise ModuleInitializationFailure('Liquity Subgraph remote error') from e
def __init__(
        self,
        ethrpc_endpoint: str,
        etherscan: Etherscan,
        msg_aggregator: MessagesAggregator,
        greenlet_manager: GreenletManager,
        connect_at_start: Sequence[NodeName],
        eth_rpc_timeout: int = DEFAULT_EVM_RPC_TIMEOUT,
) -> None:
    log.debug(f'Initializing Ethereum Manager with own rpc endpoint: {ethrpc_endpoint}')
    self.greenlet_manager = greenlet_manager
    self.web3_mapping: Dict[NodeName, Web3] = {}
    self.own_rpc_endpoint = ethrpc_endpoint
    self.etherscan = etherscan
    self.msg_aggregator = msg_aggregator
    self.eth_rpc_timeout = eth_rpc_timeout
    self.archive_connection = False
    self.queried_archive_connection = False
    for node in connect_at_start:
        self.greenlet_manager.spawn_and_track(
            after_seconds=None,
            task_name=f'Attempt connection to {str(node)} ethereum node',
            exception_is_error=True,
            method=self.attempt_connect,
            name=node,
            ethrpc_endpoint=node.endpoint(self.own_rpc_endpoint),
            mainnet_check=True,
        )
    self.blocks_subgraph = Graph(
        'https://api.thegraph.com/subgraphs/name/blocklytics/ethereum-blocks',
    )
    # Used by the transactions class. Can't be instantiated there since that is
    # a stateless object and thus wouldn't persist.
    # Not really happy with this approach but well ...
    self.tx_per_address: Dict[ChecksumEthAddress, int] = defaultdict(int)
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    self.session = requests.session()
    self.session.headers.update({'User-Agent': 'rotkehlchen'})
    self.staking_pool = EthereumConstants().contract('ADEX_STAKING_POOL')
    try:
        self.graph = Graph(
            'https://api.thegraph.com/subgraphs/name/adexnetwork/adex-protocol-v2',
        )
    except RemoteError as e:
        self.msg_aggregator.add_error(SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e)))
        raise ModuleInitializationFailure('subgraph remote error') from e
def __init__(
        self,
        ethereum_manager: 'EthereumManager',
        database: 'DBHandler',
        premium: Optional[Premium],
        msg_aggregator: MessagesAggregator,
) -> None:
    self.ethereum = ethereum_manager
    self.database = database
    self.premium = premium
    self.msg_aggregator = msg_aggregator
    self.session = requests.session()
    self.session.headers.update({'User-Agent': 'rotkehlchen'})
    try:
        self.graph: Optional[Graph] = Graph(
            'https://api.thegraph.com/subgraphs/name/adexnetwork/adex-protocol',
        )
    except RemoteError as e:
        self.graph = None
        self.msg_aggregator.add_error(
            f'Could not initialize the AdEx subgraph due to {str(e)}. '
            f'All AdEx balances and historical queries are not functioning until this is fixed. '  # noqa: E501
            f'Probably will get fixed with time. If not report it to Rotki\'s support channel.',  # noqa: E501
        )
class AaveGraphInquirer(AaveInquirer):
    """Reads Aave historical data from the graph protocol"""

    def __init__(
            self,
            ethereum_manager: 'EthereumManager',
            database: 'DBHandler',
            msg_aggregator: MessagesAggregator,
            premium: Optional[Premium],
    ) -> None:
        """May raise RemoteError if we can't connect to the subgraph"""
        super().__init__(
            ethereum_manager=ethereum_manager,
            database=database,
            premium=premium,
            msg_aggregator=msg_aggregator,
        )
        self.graph = Graph('https://api.thegraph.com/subgraphs/name/aave/protocol-multy-raw')

    def get_history_for_addresses(
            self,
            addresses: List[ChecksumEthAddress],
            to_block: int,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
            aave_balances: Dict[ChecksumEthAddress, AaveBalances],
    ) -> Dict[ChecksumEthAddress, AaveHistory]:
        """
        Queries aave history for a list of addresses.

        This function should be entered while holding the history_lock semaphore
        """
        result = {}
        for address in addresses:
            history_results = self.get_history_for_address(
                user_address=address,
                from_timestamp=from_timestamp,
                to_timestamp=to_timestamp,
                balances=aave_balances.get(address, AaveBalances({}, {})),
            )
            if history_results is None:
                continue
            result[address] = history_results

        return result

    def _get_user_reserves(self, address: ChecksumEthAddress) -> List[AaveUserReserve]:
        query = self.graph.query(
            querystr=USER_RESERVES_QUERY.format(address=address.lower()),
        )
        result = []
        for entry in query['userReserves']:
            reserve = entry['reserve']
            try:
                result.append(AaveUserReserve(
                    # The ID of reserve is the address of the asset and the address of the
                    # market's LendingPoolAddressProvider, in lower case
                    address=deserialize_ethereum_address(reserve['id'][:42]),
                    symbol=reserve['symbol'],
                ))
            except DeserializationError:
                log.error(
                    f'Failed to deserialize reserve address {reserve["id"]} '
                    f'Skipping reserve address {reserve["id"]} for user address {address}',
                )
                continue

        return result

    def _calculate_interest_and_profit(
            self,
            user_address: ChecksumEthAddress,
            user_result: Dict[str, Any],
            actions: List[AaveDepositWithdrawalEvent],
            balances: AaveBalances,
            db_interest_events: Set[AaveInterestEvent],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> Tuple[List[AaveInterestEvent], Dict[Asset, Balance]]:
        reserve_history = {}
        for reserve in user_result['reserves']:
            pairs = reserve['id'].split('0x')
            if len(pairs) != 4:
                log.error(
                    f'Expected to find 3 addresses in graph\'s reserve history id '
                    f'but the encountered id does not match: {reserve["id"]}. Skipping entry...',
                )
                continue
            try:
                address_s = '0x' + pairs[2]
                reserve_address = deserialize_ethereum_address(address_s)
            except DeserializationError:
                log.error(
                    f'Failed to deserialize reserve address {address_s} '
                    f'Skipping reserve address {address_s} for user address {user_address}',
                )
                continue

            atoken_history = _parse_atoken_balance_history(
                history=reserve['aTokenBalanceHistory'],
                from_ts=from_ts,
                to_ts=to_ts,
            )
            reserve_history[reserve_address] = atoken_history

        interest_events: List[AaveInterestEvent] = []
        atoken_balances: Dict[Asset, FVal] = defaultdict(FVal)
        used_history_indices = set()
        total_earned: Dict[Asset, Balance] = defaultdict(Balance)

        # Go through the existing db interest events and add total earned
        for interest_event in db_interest_events:
            total_earned[interest_event.asset] += interest_event.value

        # Create all new interest events in the query
        actions.sort(key=lambda event: event.timestamp)
        for action in actions:
            if action.event_type == 'deposit':
                atoken_balances[action.asset] += action.value.amount
            else:  # withdrawal
                atoken_balances[action.asset] -= action.value.amount

            action_reserve_address = asset_to_aave_reserve(action.asset)
            if action_reserve_address is None:
                log.error(
                    f'Could not find aave reserve address for asset '
                    f'{action.asset} in an aave graph response. Skipping entry...',
                )
                continue
            history = reserve_history.get(action_reserve_address, None)
            if history is None:
                log.error(
                    f'Could not find aTokenBalanceHistory for reserve '
                    f'{action_reserve_address} in an aave graph response. Skipping entry...',
                )
                continue
            history.sort(key=lambda event: event.timestamp)

            for idx, entry in enumerate(history):
                if idx in used_history_indices:
                    continue
                used_history_indices.add(idx)

                if entry.tx_hash == action.tx_hash:
                    diff = entry.balance - atoken_balances[action.asset]
                    if diff != ZERO:
                        atoken_balances[action.asset] = entry.balance
                        asset = ASSET_TO_ATOKENV1.get(action.asset, None)
                        if asset is None:
                            log.error(
                                f'Could not find corresponding aToken to '
                                f'{action.asset.identifier} during an aave graph query. '
                                f'Skipping entry...',
                            )
                            continue
                        timestamp = entry.timestamp
                        usd_price = query_usd_price_zero_if_error(
                            asset=asset,
                            time=timestamp,
                            location='aave interest event from graph query',
                            msg_aggregator=self.msg_aggregator,
                        )
                        earned_balance = Balance(amount=diff, usd_value=diff * usd_price)
                        interest_event = AaveInterestEvent(
                            event_type='interest',
                            asset=asset,
                            value=earned_balance,
                            block_number=0,  # can't get from graph query
                            timestamp=timestamp,
                            tx_hash=entry.tx_hash,
                            # not really the log index, but should also be unique
                            log_index=action.log_index + 1,
                        )
                        if interest_event in db_interest_events:
                            # This should not really happen since we already query
                            # historical atoken balance history in the new range
                            log.warning(
                                f'During aave subgraph query interest and profit calculation '
                                f'tried to generate interest event {interest_event} that '
                                f'already existed in the DB ',
                            )
                            continue

                        interest_events.append(interest_event)
                        total_earned[asset] += earned_balance

                    # and once done break off the loop
                    break

                # else this atoken history is not due to an action, so skip it.
                # It's probably due to a simple transfer
                atoken_balances[action.asset] = entry.balance
                if action.event_type == 'deposit':
                    atoken_balances[action.asset] += action.value.amount
                else:  # withdrawal
                    atoken_balances[action.asset] -= action.value.amount

        # Take aave unpaid interest into account
        for balance_asset, lending_balance in balances.lending.items():
            atoken = ASSET_TO_ATOKENV1.get(balance_asset, None)
            if atoken is None:
                log.error(
                    f'Could not find corresponding aToken to '
                    f'{balance_asset.identifier} during an aave graph unpaid interest '
                    f'query. Skipping entry...',
                )
                continue
            principal_balance = self.ethereum.call_contract(
                contract_address=atoken.ethereum_address,
                abi=ATOKEN_ABI,
                method_name='principalBalanceOf',
                arguments=[user_address],
            )
            unpaid_interest = lending_balance.balance.amount - (principal_balance / (FVal(10) ** FVal(atoken.decimals)))  # noqa: E501
            usd_price = Inquirer().find_usd_price(atoken)
            total_earned[atoken] += Balance(
                amount=unpaid_interest,
                usd_value=unpaid_interest * usd_price,
            )

        return interest_events, total_earned

    def _process_events(
            self,
            user_address: ChecksumEthAddress,
            user_result: Dict[str, Any],
            from_ts: Timestamp,
            to_ts: Timestamp,
            deposits: List[AaveDepositWithdrawalEvent],
            withdrawals: List[AaveDepositWithdrawalEvent],
            borrows: List[AaveBorrowEvent],
            repays: List[AaveRepayEvent],
            liquidations: List[AaveLiquidationEvent],
            db_events: List[AaveEvent],
            balances: AaveBalances,
    ) -> AaveEventProcessingResult:
        """Calculates the interest events and the total earned from all the given events.
        Also calculates total loss from borrowing and liquidations.

        Also returns the edited DB events
        """
        actions: List[AaveDepositWithdrawalEvent] = []
        borrow_actions: List[AaveEvent] = []
        db_interest_events: Set[AaveInterestEvent] = set()
        for db_event in db_events:
            if db_event.event_type == 'deposit':
                actions.append(db_event)  # type: ignore
            elif db_event.event_type == 'withdrawal':
                actions.append(db_event)  # type: ignore
            elif db_event.event_type == 'interest':
                db_interest_events.add(db_event)  # type: ignore
            elif db_event.event_type == 'borrow':
                borrow_actions.append(db_event)
            elif db_event.event_type == 'repay':
                borrow_actions.append(db_event)
            elif db_event.event_type == 'liquidation':
                borrow_actions.append(db_event)

        interest_events, total_earned = self._calculate_interest_and_profit(
            user_address=user_address,
            user_result=user_result,
            actions=actions + deposits + withdrawals,
            balances=balances,
            db_interest_events=db_interest_events,
            from_ts=from_ts,
            to_ts=to_ts,
        )
        total_lost, total_earned_liquidations = _calculate_loss(
            borrow_actions=borrow_actions + borrows + repays + liquidations,  # type: ignore
            balances=balances,
        )
        return AaveEventProcessingResult(
            interest_events=interest_events,
            total_earned_interest=total_earned,
            total_lost=total_lost,
            total_earned_liquidations=total_earned_liquidations,
        )

    def _get_user_data(
            self,
            from_ts: Timestamp,
            to_ts: Timestamp,
            address: ChecksumEthAddress,
            balances: AaveBalances,
    ) -> AaveHistory:
        last_query = self.database.get_used_query_range(f'aave_events_{address}')
        db_events = self.database.get_aave_events(address=address)

        now = ts_now()
        last_query_ts = 0
        if last_query is not None:
            last_query_ts = last_query[1]
            from_ts = Timestamp(last_query_ts + 1)

        deposits = withdrawals = borrows = repays = liquidation_calls = []
        query = self.graph.query(
            querystr=USER_EVENTS_QUERY,
            param_types={'$address': 'ID!'},
            param_values={'address': address.lower()},
        )
        user_result = query['users'][0]
        if now - last_query_ts > AAVE_GRAPH_RECENT_SECS:
            # In theory if these were individual queries we should do them only if
            # we have not queried recently. In practice since we only do 1 query above
            # this is useless for now, but keeping the mechanism in case we change
            # the way we query the subgraph
            deposits = self._parse_deposits(user_result['depositHistory'], from_ts, to_ts)
            withdrawals = self._parse_withdrawals(
                withdrawals=user_result['redeemUnderlyingHistory'],
                from_ts=from_ts,
                to_ts=to_ts,
            )
            borrows = self._parse_borrows(user_result['borrowHistory'], from_ts, to_ts)
            repays = self._parse_repays(user_result['repayHistory'], from_ts, to_ts)
            liquidation_calls = self._parse_liquidations(
                user_result['liquidationCallHistory'],
                from_ts,
                to_ts,
            )

        result = self._process_events(
            user_address=address,
            user_result=user_result,
            from_ts=from_ts,
            to_ts=to_ts,
            deposits=deposits,
            withdrawals=withdrawals,
            borrows=borrows,
            repays=repays,
            liquidations=liquidation_calls,
            db_events=db_events,
            balances=balances,
        )

        # Add all new events to the DB
        new_events: List[AaveEvent] = deposits + withdrawals + result.interest_events + borrows + repays + liquidation_calls  # type: ignore  # noqa: E501
        self.database.add_aave_events(address, new_events)
        # After all events have been queried then also update the query range.
        # Even if no events are found for an address we need to remember the range
        self.database.update_used_query_range(
            name=f'aave_events_{address}',
            start_ts=Timestamp(0),
            end_ts=now,
        )

        # Sort actions so that actions with same time are sorted
        # deposit -> interest -> withdrawal
        all_events: List[AaveEvent] = new_events + db_events
        sort_map = {'deposit': 0, 'interest': 0.1, 'withdrawal': 0.2, 'borrow': 0.3, 'repay': 0.4, 'liquidation': 0.5}  # noqa: E501
        all_events.sort(key=lambda event: sort_map[event.event_type] + event.timestamp)
        return AaveHistory(
            events=all_events,
            total_earned_interest=result.total_earned_interest,
            total_lost=result.total_lost,
            total_earned_liquidations=result.total_earned_liquidations,
        )

    def _parse_deposits(
            self,
            deposits: List[Dict[str, Any]],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[AaveDepositWithdrawalEvent]:
        events: List[AaveDepositWithdrawalEvent] = []
        for entry in deposits:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = self._get_asset_and_balance(
                entry=entry,
                timestamp=timestamp,
                reserve_key='reserve',
                amount_key='amount',
                location='aave deposit from graph query',
            )
            if result is None:
                continue  # problem parsing, error already logged
            asset, balance = result
            atoken = ASSET_TO_ATOKENV1.get(asset, None)
            if atoken is None:
                log.error(f'Could not find an aToken for asset {asset} during aave deposit')
                continue

            events.append(AaveDepositWithdrawalEvent(
                event_type='deposit',
                asset=asset,
                atoken=atoken,
                value=balance,
                block_number=0,  # can't get from graph query
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=index,  # not really the log index, but should also be unique
            ))

        return events

    def _parse_withdrawals(
            self,
            withdrawals: List[Dict[str, Any]],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[AaveDepositWithdrawalEvent]:
        events = []
        for entry in withdrawals:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = self._get_asset_and_balance(
                entry=entry,
                timestamp=timestamp,
                reserve_key='reserve',
                amount_key='amount',
                location='aave withdrawal from graph query',
            )
            if result is None:
                continue  # problem parsing, error already logged
            asset, balance = result
            atoken = ASSET_TO_ATOKENV1.get(asset, None)
            if atoken is None:
                log.error(f'Could not find an aToken for asset {asset} during aave withdraw')
                continue

            events.append(AaveDepositWithdrawalEvent(
                event_type='withdrawal',
                asset=asset,
                atoken=atoken,
                value=balance,
                block_number=0,  # can't get from graph query
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=index,  # not really the log index, but should also be unique
            ))

        return events

    def _parse_borrows(
            self,
            borrows: List[Dict[str, Any]],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[AaveBorrowEvent]:
        events = []
        for entry in borrows:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = self._get_asset_and_balance(
                entry=entry,
                timestamp=timestamp,
                reserve_key='reserve',
                amount_key='amount',
                location='aave borrow from graph query',
            )
            if result is None:
                continue  # problem parsing, error already logged
            asset, balance = result
            borrow_rate = FVal(entry['borrowRate']) / RAY
            borrow_rate_mode = entry['borrowRateMode']
            accrued_borrow_interest = entry['accruedBorrowInterest']
            events.append(AaveBorrowEvent(
                event_type='borrow',
                asset=asset,
                value=balance,
                borrow_rate_mode=borrow_rate_mode.lower(),
                borrow_rate=borrow_rate,
                accrued_borrow_interest=accrued_borrow_interest,
                block_number=0,  # can't get from graph query
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=index,  # not really the log index, but should also be unique
            ))

        return events

    def _parse_repays(
            self,
            repays: List[Dict[str, Any]],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[AaveRepayEvent]:
        events = []
        for entry in repays:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = _get_reserve_asset_and_decimals(entry, reserve_key='reserve')
            if result is None:
                continue  # problem parsing, error already logged
            asset, decimals = result
            amount_after_fee = token_normalized_value_decimals(
                int(entry['amountAfterFee']),
                token_decimals=decimals,
            )
            fee = token_normalized_value_decimals(int(entry['fee']), token_decimals=decimals)
            usd_price = query_usd_price_zero_if_error(
                asset=asset,
                time=timestamp,
                location='aave repay from graph query',
                msg_aggregator=self.msg_aggregator,
            )
            events.append(AaveRepayEvent(
                event_type='repay',
                asset=asset,
                value=Balance(amount=amount_after_fee, usd_value=amount_after_fee * usd_price),
                fee=Balance(amount=fee, usd_value=fee * usd_price),
                block_number=0,  # can't get from graph query
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=index,  # not really the log index, but should also be unique
            ))

        return events

    def _parse_liquidations(
            self,
            liquidations: List[Dict[str, Any]],
            from_ts: Timestamp,
            to_ts: Timestamp,
    ) -> List[AaveLiquidationEvent]:
        events = []
        for entry in liquidations:
            common = _parse_common_event_data(entry, from_ts, to_ts)
            if common is None:
                continue  # either timestamp out of range or error (logged in the function above)
            timestamp, tx_hash, index = common
            result = self._get_asset_and_balance(
                entry=entry,
                timestamp=timestamp,
                reserve_key='collateralReserve',
                amount_key='collateralAmount',
                location='aave liquidation from graph query',
            )
            if result is None:
                continue  # problem parsing, error already logged
            collateral_asset, collateral_balance = result

            result = self._get_asset_and_balance(
                entry=entry,
                timestamp=timestamp,
                reserve_key='principalReserve',
                amount_key='principalAmount',
                location='aave liquidation from graph query',
            )
            if result is None:
                continue  # problem parsing, error already logged
            principal_asset, principal_balance = result
            events.append(AaveLiquidationEvent(
                event_type='liquidation',
                collateral_asset=collateral_asset,
                collateral_balance=collateral_balance,
                principal_asset=principal_asset,
                principal_balance=principal_balance,
                block_number=0,  # can't get from graph query
                timestamp=timestamp,
                tx_hash=tx_hash,
                log_index=index,  # not really the log index, but should also be unique
            ))

        return events

    def get_history_for_address(
            self,
            user_address: ChecksumEthAddress,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
            balances: AaveBalances,
    ) -> Optional[AaveHistory]:
        """
        Queries aave history for a single address.

        This function should be entered while holding the history_lock semaphore
        """
        reserves = self._get_user_reserves(address=user_address)
        if len(reserves) != 0:
            return self._get_user_data(
                from_ts=from_timestamp,
                to_ts=to_timestamp,
                address=user_address,
                balances=balances,
            )

        return None

    def _get_asset_and_balance(
            self,
            entry: Dict[str, Any],
            timestamp: Timestamp,
            reserve_key: str,
            amount_key: str,
            location: str,
    ) -> Optional[Tuple[Asset, Balance]]:
        """Utility function to parse asset from graph query amount and price and return balance"""
        result = _get_reserve_asset_and_decimals(entry, reserve_key)
        if result is None:
            return None
        asset, decimals = result
        amount = token_normalized_value_decimals(
            token_amount=int(entry[amount_key]),
            token_decimals=decimals,
        )
        usd_price = query_usd_price_zero_if_error(
            asset=asset,
            time=timestamp,
            location=location,
            msg_aggregator=self.msg_aggregator,
        )
        return asset, Balance(amount=amount, usd_value=amount * usd_price)
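# Illustration of the reserve-history id format handled in
# _calculate_interest_and_profit above: three concatenated lowercase hex
# addresses, so splitting on '0x' yields four parts with an empty first
# element, and '0x' + parts[2] recovers the reserve address. The id below is
# made up for demonstration.
example_id = '0x' + 'a' * 40 + '0x' + 'b' * 40 + '0x' + 'c' * 40
parts = example_id.split('0x')
assert len(parts) == 4 and parts[0] == ''
assert '0x' + parts[2] == '0x' + 'b' * 40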
class Liquity(HasDSProxy):

    def __init__(
            self,
            ethereum_manager: 'EthereumManager',
            database: 'DBHandler',
            premium: Optional[Premium],
            msg_aggregator: MessagesAggregator,
    ) -> None:
        super().__init__(
            ethereum_manager=ethereum_manager,
            database=database,
            premium=premium,
            msg_aggregator=msg_aggregator,
        )
        self.history_lock = Semaphore()
        try:
            self.graph = Graph(
                'https://api.thegraph.com/subgraphs/name/liquity/liquity',
            )
        except RemoteError as e:
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(protocol='Liquity', error_msg=str(e)),
            )
            raise ModuleInitializationFailure('Liquity Subgraph remote error') from e

    def get_positions(
            self,
            addresses_list: List[ChecksumEthAddress],
    ) -> Dict[ChecksumEthAddress, Trove]:
        contract = EthereumContract(
            address=LIQUITY_TROVE_MANAGER.address,
            abi=LIQUITY_TROVE_MANAGER.abi,
            deployed_block=LIQUITY_TROVE_MANAGER.deployed_block,
        )
        # make a copy of the list to avoid modifications in the list that is passed as argument
        addresses = list(addresses_list)
        proxied_addresses = self._get_accounts_having_proxy()
        proxies_to_address = {v: k for k, v in proxied_addresses.items()}
        addresses += proxied_addresses.values()

        calls = [
            (LIQUITY_TROVE_MANAGER.address, contract.encode(method_name='Troves', arguments=[x]))
            for x in addresses
        ]
        outputs = multicall_2(
            ethereum=self.ethereum,
            require_success=False,
            calls=calls,
        )

        data: Dict[ChecksumEthAddress, Trove] = {}
        eth_price = Inquirer().find_usd_price(A_ETH)
        lusd_price = Inquirer().find_usd_price(A_LUSD)
        for idx, output in enumerate(outputs):
            status, result = output
            if status is True:
                try:
                    trove_info = contract.decode(result, 'Troves', arguments=[addresses[idx]])
                    trove_is_active = bool(trove_info[3])  # pylint: disable=unsubscriptable-object
                    if not trove_is_active:
                        continue
                    collateral = deserialize_asset_amount(
                        token_normalized_value_decimals(trove_info[1], 18),  # noqa: E501 pylint: disable=unsubscriptable-object
                    )
                    debt = deserialize_asset_amount(
                        token_normalized_value_decimals(trove_info[0], 18),  # noqa: E501 pylint: disable=unsubscriptable-object
                    )
                    collateral_balance = AssetBalance(
                        asset=A_ETH,
                        balance=Balance(
                            amount=collateral,
                            usd_value=eth_price * collateral,
                        ),
                    )
                    debt_balance = AssetBalance(
                        asset=A_LUSD,
                        balance=Balance(
                            amount=debt,
                            usd_value=lusd_price * debt,
                        ),
                    )
                    # Avoid division errors
                    collateralization_ratio: Optional[FVal]
                    liquidation_price: Optional[FVal]
                    if debt > 0:
                        collateralization_ratio = eth_price * collateral / debt * 100
                    else:
                        collateralization_ratio = None
                    if collateral > 0:
                        liquidation_price = debt * lusd_price * FVal(MIN_COLL_RATE) / collateral
                    else:
                        liquidation_price = None

                    account_address = addresses[idx]
                    if account_address in proxies_to_address:
                        account_address = proxies_to_address[account_address]
                    data[account_address] = Trove(
                        collateral=collateral_balance,
                        debt=debt_balance,
                        collateralization_ratio=collateralization_ratio,
                        liquidation_price=liquidation_price,
                        active=trove_is_active,
                        trove_id=trove_info[4],  # pylint: disable=unsubscriptable-object
                    )
                except DeserializationError as e:
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity trove information. '
                        f'Failed to decode contract information. {str(e)}.',
                    )
        return data

    def liquity_staking_balances(
            self,
            addresses: List[ChecksumEthAddress],
    ) -> Dict[ChecksumEthAddress, StakePosition]:
        staked = self._get_raw_history(addresses, 'stake')
        lqty_price = Inquirer().find_usd_price(A_LQTY)
        data = {}
        for stake in staked['lqtyStakes']:
            try:
                owner = to_checksum_address(stake['id'])
                amount = deserialize_optional_to_fval(
                    value=stake['amount'],
                    name='amount',
                    location='liquity',
                )
                position = AssetBalance(
                    asset=A_LQTY,
                    balance=Balance(
                        amount=amount,
                        usd_value=lqty_price * amount,
                    ),
                )
                data[owner] = StakePosition(position)
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                self.msg_aggregator.add_warning(
                    f'Ignoring Liquity staking information. '
                    f'Failed to decode remote response. {msg}.',
                )
                continue
        return data

    def _process_trove_events(
            self,
            changes: List[Dict[str, Any]],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> List[DefiEvent]:
        events = []
        total_lusd_trove_balance = Balance()
        realized_trove_lusd_loss = Balance()
        for change in changes:
            try:
                operation = TroveOperation.deserialize(change['troveOperation'])
                collateral_change = deserialize_asset_amount(change['collateralChange'])
                debt_change = deserialize_asset_amount(change['debtChange'])
                timestamp = change['transaction']['timestamp']
                if timestamp < from_timestamp:
                    continue
                if timestamp > to_timestamp:
                    break

                got_asset: Optional[Asset]
                spent_asset: Optional[Asset]
                pnl = got_asset = got_balance = spent_asset = spent_balance = None
                count_spent_got_cost_basis = False
                # In one transaction it is possible to generate debt and change the collateral
                if debt_change != AssetAmount(ZERO):
                    if debt_change > ZERO:
                        # Generate debt
                        count_spent_got_cost_basis = True
                        got_asset = A_LUSD
                        got_balance = Balance(
                            amount=debt_change,
                            usd_value=query_usd_price_or_use_default(
                                asset=A_LUSD,
                                time=timestamp,
                                default_value=ZERO,
                                location='Liquity',
                            ),
                        )
                        total_lusd_trove_balance += got_balance
                    else:  # payback debt
                        count_spent_got_cost_basis = True
                        spent_asset = A_LUSD
                        spent_balance = Balance(
                            amount=abs(debt_change),
                            usd_value=query_usd_price_or_use_default(
                                asset=A_LUSD,
                                time=timestamp,
                                default_value=ZERO,
                                location='Liquity',
                            ),
                        )
                        total_lusd_trove_balance -= spent_balance
                        balance = total_lusd_trove_balance.amount + realized_trove_lusd_loss.amount
                        if balance < ZERO:
                            pnl_balance = total_lusd_trove_balance + realized_trove_lusd_loss
                            realized_trove_lusd_loss += -pnl_balance
                            pnl = [AssetBalance(asset=A_LUSD, balance=pnl_balance)]

                if collateral_change != AssetAmount(ZERO):
                    if collateral_change < ZERO:
                        # Withdraw collateral
                        got_asset = A_ETH
                        got_balance = Balance(
                            amount=abs(collateral_change),
                            usd_value=query_usd_price_or_use_default(
                                asset=A_ETH,
                                time=timestamp,
                                default_value=ZERO,
                                location='Liquity',
                            ),
                        )
                    else:  # Deposit collateral
                        spent_asset = A_ETH
                        spent_balance = Balance(
                            amount=collateral_change,
                            usd_value=query_usd_price_or_use_default(
                                asset=A_ETH,
                                time=timestamp,
                                default_value=ZERO,
                                location='Liquity',
                            ),
                        )

                if operation in (
                    TroveOperation.LIQUIDATEINNORMALMODE,
                    TroveOperation.LIQUIDATEINRECOVERYMODE,
                ):
                    count_spent_got_cost_basis = True
                    spent_asset = A_ETH
                    spent_balance = Balance(
                        amount=abs(collateral_change),
                        usd_value=query_usd_price_or_use_default(
                            asset=A_ETH,
                            time=timestamp,
                            default_value=ZERO,
                            location='Liquity',
                        ),
                    )
                    pnl = [AssetBalance(asset=A_ETH, balance=-spent_balance)]

                event = DefiEvent(
                    timestamp=Timestamp(change['transaction']['timestamp']),
                    wrapped_event=change,
                    event_type=DefiEventType.LIQUITY,
                    got_asset=got_asset,
                    got_balance=got_balance,
                    spent_asset=spent_asset,
                    spent_balance=spent_balance,
                    pnl=pnl,
                    count_spent_got_cost_basis=count_spent_got_cost_basis,
                    tx_hash=change['transaction']['id'],
                )
                events.append(event)
            except (DeserializationError, KeyError) as e:
                msg = str(e)
                if isinstance(e, KeyError):
                    msg = f'Missing key entry for {msg}.'
                log.debug(f'Failed to extract defievent in Liquity from {change}')
                self.msg_aggregator.add_warning(
                    f'Ignoring Liquity Trove event in Liquity. '
                    f'Failed to decode remote information. {msg}.',
                )
                continue
        return events

    def _get_raw_history(
            self,
            addresses: List[ChecksumEthAddress],
            query_for: Literal['stake', 'trove'],
    ) -> Dict[str, Any]:
        param_types = {
            '$addresses': '[Bytes!]',
        }
        param_values = {
            'addresses': [addr.lower() for addr in addresses],
        }
        if query_for == 'trove':
            querystr = format_query_indentation(QUERY_TROVE)
        else:
            querystr = format_query_indentation(QUERY_STAKE)
        return self.graph.query(
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )

    def get_trove_history(
            self,
            addresses: List[ChecksumEthAddress],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        addresses_to_query = list(addresses)
        proxied_addresses = self._get_accounts_having_proxy()
        proxies_to_address = {v: k for k, v in proxied_addresses.items()}
        addresses_to_query += proxied_addresses.values()

        try:
            query = self._get_raw_history(addresses_to_query, 'trove')
        except RemoteError as e:
            log.error(f'Failed to query trove graph events for liquity. {str(e)}')
            query = {}

        result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
        for trove in query.get('troves', []):
            owner = to_checksum_address(trove['owner']['id'])
            if owner in proxies_to_address:
                owner = proxies_to_address[owner]
            for change in trove['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    if timestamp > to_timestamp:
                        break
                    operation = TroveOperation.deserialize(change['troveOperation'])
                    collateral_change = deserialize_optional_to_fval(
                        value=change['collateralChange'],
                        name='collateralChange',
                        location='liquity',
                    )
                    debt_change = deserialize_optional_to_fval(
                        value=change['debtChange'],
                        name='debtChange',
                        location='liquity',
                    )
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    eth_price = PriceHistorian().query_historical_price(
                        from_asset=A_ETH,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    debt_after_amount = deserialize_optional_to_fval(
                        value=change['debtAfter'],
                        name='debtAfter',
                        location='liquity',
                    )
                    collateral_after_amount = deserialize_optional_to_fval(
                        value=change['collateralAfter'],
                        name='collateralAfter',
                        location='liquity',
                    )
                    event = LiquityTroveEvent(
                        kind='trove',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        debt_after=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_after_amount,
                                usd_value=lusd_price * debt_after_amount,
                            ),
                        ),
                        collateral_after=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_after_amount,
                                usd_value=eth_price * collateral_after_amount,
                            ),
                        ),
                        debt_delta=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=debt_change,
                                usd_value=lusd_price * debt_change,
                            ),
                        ),
                        collateral_delta=AssetBalance(
                            asset=A_ETH,
                            balance=Balance(
                                amount=collateral_change,
                                usd_value=eth_price * collateral_change,
                            ),
                        ),
                        trove_operation=operation,
                        sequence_number=str(change['sequenceNumber']),
                    )
                    result[owner].append(event)
                except (DeserializationError, KeyError) as e:
                    log.debug(f'Failed to deserialize Liquity trove event: {change}')
                    msg = str(e)
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Trove event in Liquity. '
                        f'Failed to decode remote information. {msg}.',
                    )
                    continue

        return result

    def get_staking_history(
            self,
            addresses: List[ChecksumEthAddress],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> Dict[ChecksumEthAddress, List[LiquityEvent]]:
        try:
            staked = self._get_raw_history(addresses, 'stake')
        except RemoteError as e:
            log.error(f'Failed to query stake graph events for liquity. {str(e)}')
            staked = {}

        result: Dict[ChecksumEthAddress, List[LiquityEvent]] = defaultdict(list)
        for stake in staked.get('lqtyStakes', []):
            owner = to_checksum_address(stake['id'])
            for change in stake['changes']:
                try:
                    timestamp = change['transaction']['timestamp']
                    if timestamp < from_timestamp:
                        continue
                    if timestamp > to_timestamp:
                        break
                    operation_stake = LiquityStakeEventType.deserialize(change['stakeOperation'])
                    lqty_price = PriceHistorian().query_historical_price(
                        from_asset=A_LQTY,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    lusd_price = PriceHistorian().query_historical_price(
                        from_asset=A_LUSD,
                        to_asset=A_USD,
                        timestamp=timestamp,
                    )
                    stake_after = deserialize_optional_to_fval(
                        value=change['stakedAmountAfter'],
                        name='stakedAmountAfter',
                        location='liquity',
                    )
                    stake_change = deserialize_optional_to_fval(
                        value=change['stakedAmountChange'],
                        name='stakedAmountChange',
                        location='liquity',
                    )
                    issuance_gain = deserialize_optional_to_fval(
                        value=change['issuanceGain'],
                        name='issuanceGain',
                        location='liquity',
                    )
                    redemption_gain = deserialize_optional_to_fval(
                        value=change['redemptionGain'],
                        name='redemptionGain',
                        location='liquity',
                    )
                    stake_event = LiquityStakeEvent(
                        kind='stake',
                        tx=change['transaction']['id'],
                        address=owner,
                        timestamp=timestamp,
                        stake_after=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_after,
                                usd_value=lqty_price * stake_after,
                            ),
                        ),
                        stake_change=AssetBalance(
                            asset=A_LQTY,
                            balance=Balance(
                                amount=stake_change,
                                usd_value=lqty_price * stake_change,
                            ),
                        ),
                        issuance_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=issuance_gain,
                                usd_value=lusd_price * issuance_gain,
                            ),
                        ),
                        redemption_gain=AssetBalance(
                            asset=A_LUSD,
                            balance=Balance(
                                amount=redemption_gain,
                                usd_value=lusd_price * redemption_gain,
                            ),
                        ),
                        stake_operation=operation_stake,
                        sequence_number=str(change['transaction']['sequenceNumber']),
                    )
                    result[owner].append(stake_event)
                except (DeserializationError, KeyError) as e:
                    msg = str(e)
                    log.debug(f'Failed to deserialize Liquity entry: {change}')
                    if isinstance(e, KeyError):
                        msg = f'Missing key entry for {msg}.'
                    self.msg_aggregator.add_warning(
                        f'Ignoring Liquity Stake event in Liquity. '
                        f'Failed to decode remote information. {msg}.',
                    )
                    continue

        return result

    def get_history_events(
            self,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
            addresses: List[ChecksumEthAddress],
    ) -> List[DefiEvent]:
        query = self._get_raw_history(addresses, 'trove')
        result = []
        for trove in query['troves']:
            changes = self._process_trove_events(trove['changes'], from_timestamp, to_timestamp)
            result.append(changes)
        # Flatten the result (list of lists to list)
        if result:
            return reduce(add, result)
        return []

    # -- Methods following the EthereumModule interface -- #
    def on_account_addition(self, address: ChecksumEthAddress) -> Optional[List['AssetBalance']]:
        super().on_account_addition(address)
        trove_info = self.get_positions([address])
        result = []
        if address in trove_info:
            result.append(trove_info[address].collateral)
        stake_info = self.liquity_staking_balances([address])
        if address in stake_info:
            result.append(stake_info[address].staked)
        return result
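# Worked example of the trove arithmetic in get_positions above, with made-up
# numbers and MIN_COLL_RATE assumed to be 1.1 (Liquity's 110% minimum):
# 10 ETH of collateral at $2000 against 8000 LUSD of debt at $1.
eth_price, lusd_price = 2000, 1
collateral, debt = 10, 8000
collateralization_ratio = eth_price * collateral / debt * 100  # 250.0 (%)
liquidation_price = debt * lusd_price * 1.1 / collateral       # 880.0 USD per ETH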
class Compound(EthereumModule): """Compound integration module https://compound.finance/docs#guides """ def __init__( self, ethereum_manager: 'EthereumManager', database: DBHandler, premium: Optional[Premium], msg_aggregator: MessagesAggregator, ): self.ethereum = ethereum_manager self.database = database self.premium = premium self.msg_aggregator = msg_aggregator self.graph = Graph('https://api.thegraph.com/subgraphs/name/graphprotocol/compound-v2') self.comptroller_address = to_checksum_address(self.ethereum.call_contract( contract_address=COMPTROLLER_PROXY.address, abi=COMPTROLLER_PROXY.abi, method_name='comptrollerImplementation', )) def _get_apy(self, address: ChecksumEthAddress, supply: bool) -> Optional[FVal]: method_name = 'supplyRatePerBlock' if supply else 'borrowRatePerBlock' try: rate = self.ethereum.call_contract( contract_address=address, abi=CTOKEN_ABI, method_name=method_name, ) except (RemoteError, BlockchainQueryError) as e: log.error(f'Could not query cToken {address} for supply/borrow rate: {str(e)}') return None apy = ((FVal(rate) / ETH_MANTISSA * BLOCKS_PER_DAY) + 1) ** (DAYS_PER_YEAR - 1) - 1 # noqa: E501 return apy def get_balances( self, given_defi_balances: GIVEN_DEFI_BALANCES, ) -> Dict[ChecksumEthAddress, Dict]: compound_balances = {} if isinstance(given_defi_balances, dict): defi_balances = given_defi_balances else: defi_balances = given_defi_balances() for account, balance_entries in defi_balances.items(): lending_map = {} borrowing_map = {} rewards_map = {} for balance_entry in balance_entries: if balance_entry.protocol.name != 'Compound': continue entry = balance_entry.base_balance try: asset = Asset(entry.token_symbol) except UnknownAsset: log.error( f'Encountered unknown asset {entry.token_symbol} in compound. Skipping', ) continue if entry.token_address == A_COMP.ethereum_address: rewards_map[A_COMP] = CompoundBalance( balance_type=BalanceType.ASSET, balance=entry.balance, apy=None, ) continue if balance_entry.balance_type == 'Asset': # Get the underlying balance underlying_symbol = balance_entry.underlying_balances[0].token_symbol try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Encountered unknown asset {underlying_symbol} in compound. Skipping', ) continue lending_map[underlying_asset.identifier] = CompoundBalance( balance_type=BalanceType.ASSET, balance=balance_entry.underlying_balances[0].balance, apy=self._get_apy(entry.token_address, supply=True), ) else: # 'Debt' try: ctoken = EthereumToken('c' + entry.token_symbol) except UnknownAsset: log.error( f'Encountered unknown asset {entry.token_symbol} in ' f'compound while figuring out cToken. 
Skipping', ) continue borrowing_map[asset.identifier] = CompoundBalance( balance_type=BalanceType.DEBT, balance=entry.balance, apy=self._get_apy(ctoken.ethereum_address, supply=False), ) if lending_map == {} and borrowing_map == {} and rewards_map == {}: # no balances for the account continue compound_balances[account] = { 'rewards': rewards_map, 'lending': lending_map, 'borrowing': borrowing_map, } return compound_balances def _get_borrow_events( self, event_type: Literal['borrow', 'repay'], address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: param_types, param_values = _get_params(from_ts, to_ts, address) if event_type == 'borrow': graph_event_name = 'borrowEvents' payer_or_empty = '' elif event_type == 'repay': graph_event_name = 'repayEvents' payer_or_empty = 'payer' result = self.graph.query( querystr=BORROW_EVENTS_QUERY_PREFIX.format( graph_event_name=graph_event_name, payer_or_empty=payer_or_empty, ), param_types=param_types, param_values=param_values, ) events = [] for entry in result[graph_event_name]: underlying_symbol = entry['underlyingSymbol'] try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Found unexpected token symbol {underlying_symbol} during ' f'graph query. Skipping.', ) continue timestamp = entry['blockTime'] usd_price = query_usd_price_zero_if_error( asset=underlying_asset, time=timestamp, location=f'compound {event_type}', msg_aggregator=self.msg_aggregator, ) amount = FVal(entry['amount']) parse_result = _get_txhash_and_logidx(entry['id']) if parse_result is None: log.error( f'Found unprocessable borrow/repay id from the graph {entry["id"]}. Skipping', ) continue events.append(CompoundEvent( event_type=event_type, address=address, block_number=entry['blockNumber'], timestamp=timestamp, asset=underlying_asset, value=Balance(amount=amount, usd_value=amount * usd_price), to_asset=None, to_value=None, realized_pnl=None, tx_hash=parse_result[0], log_index=parse_result[1], )) return events def _get_liquidation_events( self, address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: """https://compound.finance/docs/ctokens#liquidate-borrow""" param_types, param_values = _get_params(from_ts, to_ts, address) result = self.graph.query( querystr="""liquidationEvents (where: {blockTime_lte: $end_ts, blockTime_gte: $start_ts, from: $address}) { id amount from blockNumber blockTime cTokenSymbol underlyingSymbol underlyingRepayAmount }}""", param_types=param_types, param_values=param_values, ) events = [] for entry in result['liquidationEvents']: ctoken_symbol = entry['cTokenSymbol'] try: ctoken_asset = Asset(ctoken_symbol) except UnknownAsset: log.error( f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.') continue underlying_symbol = entry['underlyingSymbol'] try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Found unexpected token symbol {underlying_symbol} during ' f'graph query. 
Skipping.', ) continue timestamp = entry['blockTime'] # Amount/value of underlying asset paid by liquidator # Essentially liquidator covers part of the debt of the user debt_amount = FVal(entry['underlyingRepayAmount']) underlying_usd_price = query_usd_price_zero_if_error( asset=underlying_asset, time=timestamp, location='compound liquidation underlying asset', msg_aggregator=self.msg_aggregator, ) debt_usd_value = debt_amount * underlying_usd_price # Amount/value of ctoken_asset lost to the liquidator # This is what the liquidator gains at a discount liquidated_amount = FVal(entry['amount']) liquidated_usd_price = query_usd_price_zero_if_error( asset=ctoken_asset, time=timestamp, location='compound liquidation ctoken asset', msg_aggregator=self.msg_aggregator, ) liquidated_usd_value = liquidated_amount * liquidated_usd_price parse_result = _get_txhash_and_logidx(entry['id']) if parse_result is None: log.error( f'Found unprocessable liquidation id from the graph {entry["id"]}. Skipping', ) continue gained_value = Balance(amount=debt_amount, usd_value=debt_usd_value) lost_value = Balance(amount=liquidated_amount, usd_value=liquidated_usd_value) events.append(CompoundEvent( event_type='liquidation', address=address, block_number=entry['blockNumber'], timestamp=timestamp, asset=underlying_asset, value=gained_value, to_asset=ctoken_asset, to_value=lost_value, realized_pnl=None, tx_hash=parse_result[0], log_index=parse_result[1], )) return events def _get_lend_events( self, event_type: Literal['mint', 'redeem'], address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: param_types, param_values = _get_params(from_ts, to_ts, address) if event_type == 'mint': graph_event_name = 'mintEvents' addr_position = 'to' elif event_type == 'redeem': graph_event_name = 'redeemEvents' addr_position = 'from' result = self.graph.query( querystr=LEND_EVENTS_QUERY_PREFIX.format( graph_event_name=graph_event_name, addr_position=addr_position, ), param_types=param_types, param_values=param_values, ) events = [] for entry in result[graph_event_name]: ctoken_symbol = entry['cTokenSymbol'] try: ctoken_asset = Asset(ctoken_symbol) except UnknownAsset: log.error( f'Found unexpected cTokenSymbol {ctoken_symbol} during graph query. Skipping.') continue underlying_symbol = ctoken_symbol[1:] try: underlying_asset = Asset(underlying_symbol) except UnknownAsset: log.error( f'Found unexpected token symbol {underlying_symbol} during ' f'graph query. Skipping.', ) continue timestamp = entry['blockTime'] usd_price = query_usd_price_zero_if_error( asset=underlying_asset, time=timestamp, location=f'compound {event_type}', msg_aggregator=self.msg_aggregator, ) underlying_amount = FVal(entry['underlyingAmount']) usd_value = underlying_amount * usd_price parse_result = _get_txhash_and_logidx(entry['id']) if parse_result is None: log.error(f'Found unprocessable mint id from the graph {entry["id"]}. 
Skipping') continue amount = FVal(entry['amount']) if event_type == 'mint': from_value = Balance(amount=underlying_amount, usd_value=usd_value) to_value = Balance(amount=amount, usd_value=usd_value) from_asset = underlying_asset to_asset = ctoken_asset else: # redeem from_value = Balance(amount=amount, usd_value=usd_value) to_value = Balance(amount=underlying_amount, usd_value=usd_value) from_asset = ctoken_asset to_asset = underlying_asset events.append(CompoundEvent( event_type=event_type, address=address, block_number=entry['blockNumber'], timestamp=timestamp, asset=from_asset, value=from_value, to_asset=to_asset, to_value=to_value, realized_pnl=None, tx_hash=parse_result[0], log_index=parse_result[1], )) return events def _get_comp_events( self, address: ChecksumEthAddress, from_ts: Timestamp, to_ts: Timestamp, ) -> List[CompoundEvent]: self.ethereum.etherscan.get_blocknumber_by_time(from_ts) from_block = max( COMP_DEPLOYED_BLOCK, self.ethereum.etherscan.get_blocknumber_by_time(from_ts), ) argument_filters = { 'from': COMPTROLLER_PROXY.address, 'to': address, } comp_events = self.ethereum.get_logs( contract_address=A_COMP.ethereum_address, abi=ERC20TOKEN_ABI, event_name='Transfer', argument_filters=argument_filters, from_block=from_block, to_block=self.ethereum.etherscan.get_blocknumber_by_time(to_ts), ) events = [] for event in comp_events: timestamp = self.ethereum.get_event_timestamp(event) amount = token_normalized_value(hex_or_bytes_to_int(event['data']), A_COMP.decimals) usd_price = query_usd_price_zero_if_error( asset=A_COMP, time=timestamp, location='comp_claim', msg_aggregator=self.msg_aggregator, ) value = Balance(amount, amount * usd_price) events.append(CompoundEvent( event_type='comp', address=address, block_number=deserialize_blocknumber(event['blockNumber']), timestamp=timestamp, asset=A_COMP, value=value, to_asset=None, to_value=None, realized_pnl=value, tx_hash=event['transactionHash'], log_index=deserialize_int_from_hex_or_int(event['logIndex'], 'comp log index'), )) return events def _process_events( self, events: List[CompoundEvent], given_defi_balances: GIVEN_DEFI_BALANCES, ) -> Tuple[ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS, ADDRESS_TO_ASSETS]: """Processes all events and returns a dictionary of earned balances totals""" assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) loss_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) rewards_assets: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) profit_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) loss_so_far: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) liquidation_profit: ADDRESS_TO_ASSETS = defaultdict(lambda: defaultdict(Balance)) balances = self.get_balances(given_defi_balances) for idx, event in enumerate(events): if event.event_type == 'mint': assets[event.address][event.asset] -= event.value elif event.event_type == 'redeem': assert event.to_asset, 'redeem events should have a to_asset' assert event.to_value, 'redeem events should have a to_value' profit_amount = ( assets[event.address][event.to_asset].amount + event.to_value.amount - profit_so_far[event.address][event.to_asset].amount ) profit: Optional[Balance] if profit_amount >= 0: usd_price = query_usd_price_zero_if_error( asset=event.to_asset, time=event.timestamp, location='comp redeem event processing', msg_aggregator=self.msg_aggregator, ) profit = Balance(profit_amount, profit_amount * usd_price) profit_so_far[event.address][event.to_asset] += 
profit else: profit = None assets[event.address][event.to_asset] += event.to_value events[idx] = event._replace(realized_pnl=profit) # TODO: maybe not named tuple? elif event.event_type == 'borrow': loss_assets[event.address][event.asset] -= event.value elif event.event_type == 'repay': loss_amount = ( loss_assets[event.address][event.asset].amount + event.value.amount - loss_so_far[event.address][event.asset].amount ) loss: Optional[Balance] if loss_amount >= 0: usd_price = query_usd_price_zero_if_error( asset=event.asset, time=event.timestamp, location='comp repay event processing', msg_aggregator=self.msg_aggregator, ) loss = Balance(loss_amount, loss_amount * usd_price) loss_so_far[event.address][event.asset] += loss else: loss = None loss_assets[event.address][event.asset] += event.value events[idx] = event._replace(realized_pnl=loss) # TODO: maybe not named tuple? elif event.event_type == 'liquidation': assert event.to_asset, 'liquidation events should have a to_asset' # Liquidator covers part of the borrowed amount loss_assets[event.address][event.asset] += event.value liquidation_profit[event.address][event.asset] += event.value # Liquidator receives discounted to_asset loss_assets[event.address][event.to_asset] += event.to_value loss_so_far[event.address][event.to_asset] += event.to_value elif event.event_type == 'comp': rewards_assets[event.address][A_COMP] += event.value for address, bentry in balances.items(): for asset, entry in bentry['lending'].items(): profit_so_far[address][asset] += entry.balance for asset, entry in bentry['borrowing'].items(): remaining = entry.balance + loss_assets[address][asset] if remaining.amount < ZERO: continue loss_so_far[address][asset] += remaining if loss_so_far[address][asset].usd_value < ZERO: amount = loss_so_far[address][asset].amount loss_so_far[address][asset] = Balance( amount=amount, usd_value=amount * Inquirer().find_usd_price(Asset(asset)), ) for asset, entry in bentry['rewards'].items(): rewards_assets[address][asset] += entry.balance return profit_so_far, loss_so_far, liquidation_profit, rewards_assets def get_history( self, given_defi_balances: GIVEN_DEFI_BALANCES, addresses: List[ChecksumEthAddress], reset_db_data: bool, # pylint: disable=unused-argument from_timestamp: Timestamp, to_timestamp: Timestamp, ) -> Dict[str, Any]: """May raise: - RemoteError due to the graph query failure or etherscan """ history: Dict[str, Any] = {} events: List[CompoundEvent] = [] for address in addresses: user_events = self._get_lend_events('mint', address, from_timestamp, to_timestamp) user_events.extend(self._get_lend_events('redeem', address, from_timestamp, to_timestamp)) # noqa: E501 user_events.extend(self._get_borrow_events('borrow', address, from_timestamp, to_timestamp)) # noqa: E501 repay_events = self._get_borrow_events('repay', address, from_timestamp, to_timestamp) liquidation_events = self._get_liquidation_events(address, from_timestamp, to_timestamp) # noqa: E501 indices_to_remove = [] for levent in liquidation_events: for ridx, revent in enumerate(repay_events): if levent.tx_hash == revent.tx_hash: indices_to_remove.append(ridx) for i in sorted(indices_to_remove, reverse=True): del repay_events[i] user_events.extend(repay_events) user_events.extend(liquidation_events) if len(user_events) != 0: # query comp events only if any other event has happened user_events.extend(self._get_comp_events(address, from_timestamp, to_timestamp)) events.extend(user_events) events.sort(key=lambda x: x.timestamp) history['events'] = events profit, 
loss, liquidation, rewards = self._process_events(events, given_defi_balances) history['interest_profit'] = profit history['liquidation_profit'] = liquidation history['debt_loss'] = loss history['rewards'] = rewards return history # -- Methods following the EthereumModule interface -- # def on_startup(self) -> None: pass def on_account_addition(self, address: ChecksumEthAddress) -> None: pass def on_account_removal(self, address: ChecksumEthAddress) -> None: pass
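# The mint/redeem branch of _process_events above implements a simple
# cost-basis scheme: mints subtract the deposited underlying from a running
# balance, and each redeem recognizes as profit whatever exceeds the amount
# deposited plus the profit already recognized. A minimal sketch of that
# logic, with FVal/Balance simplified to Decimal and a single asset;
# `lend_profit_sketch` and its inputs are illustrative names only.
from decimal import Decimal
from typing import List, Optional, Tuple


def lend_profit_sketch(events: List[Tuple[str, Decimal]]) -> List[Optional[Decimal]]:
    """Return the profit recognized at each redeem of a single asset."""
    tracked = Decimal(0)        # plays the role of assets[address][asset]
    profit_so_far = Decimal(0)  # plays the role of profit_so_far[address][asset]
    realized: List[Optional[Decimal]] = []
    for event_type, amount in events:
        if event_type == 'mint':
            tracked -= amount  # deposits count against the running balance
        else:  # 'redeem'
            profit = tracked + amount - profit_so_far
            if profit >= 0:
                profit_so_far += profit
                realized.append(profit)
            else:
                realized.append(None)  # nothing earned by this redeem
            tracked += amount
    return realized


# Deposit 100, redeem 105: the 5 surplus is recognized as interest profit.
assert lend_profit_sketch([('mint', Decimal(100)), ('redeem', Decimal(105))]) == [Decimal(5)]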
class Uniswap(AMMSwapPlatform, EthereumModule): """Uniswap integration module * Uniswap subgraph: https://github.com/Uniswap/uniswap-v2-subgraph https://github.com/croco-finance/uniswap-v3-subgraph """ def __init__( self, ethereum_manager: 'EthereumManager', database: 'DBHandler', premium: Optional[Premium], msg_aggregator: MessagesAggregator, ) -> None: try: self.graph = Graph( 'https://api.thegraph.com/subgraphs/name/benesjan/uniswap-v2', ) self.graph_v3 = Graph( 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v3', ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format( error_msg=str(e), location=self.location, ), ) raise ModuleInitializationFailure( 'Uniswap subgraph remote error') from e super().__init__( location=Location.UNISWAP, ethereum_manager=ethereum_manager, database=database, premium=premium, msg_aggregator=msg_aggregator, graph=self.graph, ) def get_balances_chain( self, addresses: List[ChecksumEthAddress]) -> ProtocolBalance: """Get the addresses' pools data via chain queries.""" known_assets: Set[EthereumToken] = set() unknown_assets: Set[EthereumToken] = set() lp_addresses = get_latest_lp_addresses(self.data_directory) address_mapping = {} for address in addresses: pool_balances = uniswap_lp_token_balances( userdb=self.database, address=address, ethereum=self.ethereum, lp_addresses=lp_addresses, known_assets=known_assets, unknown_assets=unknown_assets, ) if len(pool_balances) != 0: address_mapping[address] = pool_balances protocol_balance = ProtocolBalance( address_balances=address_mapping, known_assets=known_assets, unknown_assets=unknown_assets, ) return protocol_balance def _get_events_balances( self, addresses: List[ChecksumEthAddress], from_timestamp: Timestamp, to_timestamp: Timestamp, ) -> AddressEventsBalances: """Request via graph all events for new addresses and the latest ones for already existing addresses. Then the requested events are written in DB and finally all DB events are read, and processed for calculating total profit/loss per LP (stored within <LiquidityPoolEventsBalance>). 
""" address_events_balances: AddressEventsBalances = {} address_events: DDAddressEvents = defaultdict(list) db_address_events: AddressEvents = {} new_addresses: List[ChecksumEthAddress] = [] existing_addresses: List[ChecksumEthAddress] = [] min_end_ts: Timestamp = to_timestamp # Get addresses' last used query range for Uniswap events for address in addresses: entry_name = f'{UNISWAP_EVENTS_PREFIX}_{address}' events_range = self.database.get_used_query_range(name=entry_name) if not events_range: new_addresses.append(address) else: existing_addresses.append(address) min_end_ts = min(min_end_ts, events_range[1]) # Request new addresses' events if new_addresses: start_ts = Timestamp(0) for address in new_addresses: for event_type in EventType: new_address_events = self._get_events_graph( address=address, start_ts=start_ts, end_ts=to_timestamp, event_type=event_type, ) if new_address_events: address_events[address].extend(new_address_events) # Insert new address' last used query range self.database.update_used_query_range( name=f'{UNISWAP_EVENTS_PREFIX}_{address}', start_ts=start_ts, end_ts=to_timestamp, ) # Request existing DB addresses' events if existing_addresses and to_timestamp > min_end_ts: for address in existing_addresses: for event_type in EventType: address_new_events = self._get_events_graph( address=address, start_ts=min_end_ts, end_ts=to_timestamp, event_type=event_type, ) if address_new_events: address_events[address].extend(address_new_events) # Update existing address' last used query range self.database.update_used_query_range( name=f'{UNISWAP_EVENTS_PREFIX}_{address}', start_ts=min_end_ts, end_ts=to_timestamp, ) # Insert requested events in DB all_events = [] for address in filter(lambda x: x in address_events, addresses): all_events.extend(address_events[address]) self.database.add_amm_events(all_events) # Fetch all DB events within the time range for address in addresses: db_events = self.database.get_amm_events( events=[EventType.MINT_UNISWAP, EventType.BURN_UNISWAP], from_ts=from_timestamp, to_ts=to_timestamp, address=address, ) if db_events: # return events with the oldest first db_events.sort( key=lambda event: (event.timestamp, event.log_index)) db_address_events[address] = db_events # Request addresses' current balances (UNI-V2s and underlying tokens) # if there is no specific time range in this endpoint call (i.e. all # events). Current balances in the protocol are needed for an accurate # profit/loss calculation. # TODO: when this endpoint is called with a specific time range, # getting the balances and underlying tokens within that time range # requires an archive node. Feature pending to be developed. address_balances: AddressToLPBalances = { } # Empty when specific time range if from_timestamp == Timestamp(0): address_balances = self.get_balances(addresses) # Calculate addresses' event balances (i.e. profit/loss per pool) for address, events in db_address_events.items(): balances = address_balances.get( address, []) # Empty when specific time range events_balances = self._calculate_events_balances( address=address, events=events, balances=balances, ) address_events_balances[address] = events_balances return address_events_balances def _get_trades( self, addresses: List[ChecksumEthAddress], from_timestamp: Timestamp, to_timestamp: Timestamp, only_cache: bool, ) -> AddressTrades: """Request via graph all trades for new addresses and the latest ones for already existing addresses. 
Then the requested trades are written in DB and finally all DB trades are
        read and returned.
        """
        address_amm_trades: AddressTrades = {}
        new_addresses: List[ChecksumEthAddress] = []
        existing_addresses: List[ChecksumEthAddress] = []
        min_end_ts: Timestamp = to_timestamp

        if only_cache:
            return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp)

        # Get addresses' last used query range for Uniswap trades
        for address in addresses:
            entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
            trades_range = self.database.get_used_query_range(name=entry_name)

            if not trades_range:
                new_addresses.append(address)
            else:
                existing_addresses.append(address)
                min_end_ts = min(min_end_ts, trades_range[1])

        # Request new addresses' trades
        if new_addresses:
            start_ts = Timestamp(0)
            new_address_trades = self._get_trades_graph(
                addresses=new_addresses,
                start_ts=start_ts,
                end_ts=to_timestamp,
            )
            address_amm_trades.update(new_address_trades)

            # Insert last used query range for new addresses
            for address in new_addresses:
                entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
                self.database.update_used_query_range(
                    name=entry_name,
                    start_ts=start_ts,
                    end_ts=to_timestamp,
                )

        # Request existing DB addresses' trades
        if existing_addresses and to_timestamp > min_end_ts:
            address_new_trades = self._get_trades_graph(
                addresses=existing_addresses,
                start_ts=min_end_ts,
                end_ts=to_timestamp,
            )
            address_amm_trades.update(address_new_trades)

            # Update last used query range for existing addresses
            for address in existing_addresses:
                entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}'
                self.database.update_used_query_range(
                    name=entry_name,
                    start_ts=min_end_ts,
                    end_ts=to_timestamp,
                )

        # Insert all unique swaps to the DB
        all_swaps = set()
        for address in filter(lambda x: x in address_amm_trades, addresses):
            for trade in address_amm_trades[address]:
                for swap in trade.swaps:
                    all_swaps.add(swap)

        self.database.add_amm_swaps(list(all_swaps))

        return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp)

    def _get_trades_graph_for_address(
            self,
            address: ChecksumEthAddress,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> List[AMMTrade]:
        trades = []
        try:
            trades.extend(self._read_subgraph_trades(address, start_ts, end_ts))
        except RemoteError as e:
            log.error(
                f'Error querying uniswap v2 trades using graph for address {address} '
                f'between {start_ts} and {end_ts}. {str(e)}',
            )
        try:
            trades.extend(self._get_trades_graph_v3_for_address(address, start_ts, end_ts))
        except RemoteError as e:
            log.error(
                f'Error querying uniswap v3 trades using graph for address {address} '
                f'between {start_ts} and {end_ts}. {str(e)}',
            )
        return trades

    def _get_trades_graph_v3_for_address(
            self,
            address: ChecksumEthAddress,
            start_ts: Timestamp,
            end_ts: Timestamp,
    ) -> List[AMMTrade]:
        """Get the address' trades data querying the Uniswap V3 subgraph

        Each trade (swap) instantiates an <AMMTrade>.

        The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
        Translated to Uniswap lingo:

        Trade type BUY:
        - `amount1` (QUOTE, reserve1) is gt 0.
        - `amount0` (BASE, reserve0) is lt 0.

        Trade type SELL:
        - `amount0` (BASE, reserve0) is gt 0.
        - `amount1` (QUOTE, reserve1) is lt 0.
May raise: - RemoteError """ trades: List[AMMTrade] = [] param_types = { '$limit': 'Int!', '$offset': 'Int!', '$address': 'Bytes!', '$start_ts': 'BigInt!', '$end_ts': 'BigInt!', } param_values = { 'limit': GRAPH_QUERY_LIMIT, 'offset': 0, 'address': address.lower(), 'start_ts': str(start_ts), 'end_ts': str(end_ts), } querystr = format_query_indentation(V3_SWAPS_QUERY.format()) while True: try: result = self.graph_v3.query( querystr=querystr, param_types=param_types, param_values=param_values, ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format( error_msg=str(e), location=self.location, ), ) raise result_data = result['swaps'] for entry in result_data: swaps = [] for swap in entry['transaction']['swaps']: timestamp = swap['timestamp'] swap_token0 = swap['token0'] swap_token1 = swap['token1'] try: token0_deserialized = deserialize_ethereum_address( swap_token0['id']) token1_deserialized = deserialize_ethereum_address( swap_token1['id']) from_address_deserialized = deserialize_ethereum_address( swap['sender']) to_address_deserialized = deserialize_ethereum_address( swap['recipient']) except DeserializationError: msg = ( f'Failed to deserialize addresses in trade from uniswap graph with ' f'token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, ' f'swap sender: {swap["sender"]}, swap receiver {swap["to"]}' ) log.error(msg) continue token0 = get_or_create_ethereum_token( userdb=self.database, symbol=swap_token0['symbol'], ethereum_address=token0_deserialized, name=swap_token0['name'], decimals=swap_token0['decimals'], ) token1 = get_or_create_ethereum_token( userdb=self.database, symbol=swap_token1['symbol'], ethereum_address=token1_deserialized, name=swap_token1['name'], decimals=int(swap_token1['decimals']), ) try: if swap['amount0'].startswith('-'): amount0_in = AssetAmount(FVal(ZERO)) amount0_out = deserialize_asset_amount_force_positive( swap['amount0']) amount1_in = deserialize_asset_amount_force_positive( swap['amount1']) amount1_out = AssetAmount(FVal(ZERO)) else: amount0_in = deserialize_asset_amount_force_positive( swap['amount0']) amount0_out = AssetAmount(FVal(ZERO)) amount1_in = AssetAmount(FVal(ZERO)) amount1_out = deserialize_asset_amount_force_positive( swap['amount1']) except ValueError as e: log.error( f'Failed to read amounts in Uniswap V3 swap {str(swap)}. ' f'{str(e)}.', ) continue swaps.append( AMMSwap( tx_hash=swap['id'].split('#')[0], log_index=int(swap['logIndex']), address=address, from_address=from_address_deserialized, to_address=to_address_deserialized, timestamp=Timestamp(int(timestamp)), location=Location.UNISWAP, token0=token0, token1=token1, amount0_in=amount0_in, amount1_in=amount1_in, amount0_out=amount0_out, amount1_out=amount1_out, )) # with the new logic the list of swaps can be empty, in that case don't try # to make trades from the swaps if len(swaps) == 0: continue # Now that we got all swaps for a transaction, create the trade object trades.extend(self._tx_swaps_to_trades(swaps)) # Check whether an extra request is needed if len(result_data) < GRAPH_QUERY_LIMIT: break # Update pagination step param_values = { **param_values, 'offset': param_values['offset'] + GRAPH_QUERY_LIMIT, # type: ignore } return trades def get_balances( self, addresses: List[ChecksumEthAddress], ) -> AddressToLPBalances: """Get the addresses' balances in the Uniswap protocol Premium users can request balances either via the Uniswap subgraph or on-chain. 
""" if self.premium: protocol_balance = self._get_balances_graph(addresses=addresses) else: protocol_balance = self.get_balances_chain(addresses) known_assets = protocol_balance.known_assets unknown_assets = protocol_balance.unknown_assets known_asset_price = self._get_known_asset_price( known_assets=known_assets, unknown_assets=unknown_assets, ) unknown_asset_price: AssetToPrice = {} if self.premium: unknown_asset_price = self._get_unknown_asset_price_graph( unknown_assets=unknown_assets) # noqa:E501 self._update_assets_prices_in_address_balances( address_balances=protocol_balance.address_balances, known_asset_price=known_asset_price, unknown_asset_price=unknown_asset_price, ) return protocol_balance.address_balances def get_trades_history( self, addresses: List[ChecksumEthAddress], reset_db_data: bool, from_timestamp: Timestamp, to_timestamp: Timestamp, ) -> AddressTrades: """Get the addresses' trades history in the Uniswap protocol""" with self.trades_lock: if reset_db_data is True: self.database.delete_uniswap_trades_data() trades = self._get_trades( addresses=addresses, from_timestamp=from_timestamp, to_timestamp=to_timestamp, only_cache=False, ) return trades def delete_events_data(self) -> None: self.database.delete_uniswap_events_data() def deactivate(self) -> None: self.database.delete_uniswap_trades_data() self.database.delete_uniswap_events_data() def on_account_addition( self, address: ChecksumEthAddress) -> Optional[List['AssetBalance']]: pass def on_account_removal(self, address: ChecksumEthAddress) -> None: pass
class YearnVaultsV2Graph: """Reads Yearn vaults v2 information from the graph""" def __init__( self, ethereum_manager: 'EthereumManager', database: 'DBHandler', premium: Optional[Premium], msg_aggregator: MessagesAggregator, ) -> None: self.ethereum = ethereum_manager self.database = database self.msg_aggregator = msg_aggregator self.premium = premium self.graph = Graph( 'https://api.thegraph.com/subgraphs/name/salazarguille/yearn-vaults-v2-subgraph-mainnet' ) # noqa: E501 def _process_event( self, events: List[Dict[str, Any]], event_type: Literal['deposit', 'withdraw'], ) -> List[YearnVaultEvent]: result = [] for entry in events: # The id returned is a composition of hash + '-' + log_index try: _, tx_hash, log_index, _ = entry['id'].split('-') except ValueError as e: log.debug( f'Failed to extract transaction hash and log index from {event_type} event ' f'in yearn vaults v2 graph query. Got {entry["id"]}. {str(e)}.', ) self.msg_aggregator.add_warning( f'Ignoring {event_type} in yearn vault V2. Failed to read id {entry["id"]}', ) continue try: if event_type == 'deposit': from_asset = EthereumToken(entry['vault']['token']['id']) to_asset = EthereumToken( entry['vault']['shareToken']['id']) elif event_type == 'withdraw': from_asset = EthereumToken( entry['vault']['shareToken']['id']) to_asset = EthereumToken(entry['vault']['token']['id']) except UnknownAsset: if event_type == 'deposit': from_str = entry['vault']['token']['symbol'] to_str = entry['vault']['shareToken']['symbol'] elif event_type == 'withdraw': from_str = entry['vault']['shareToken']['symbol'] to_str = entry['vault']['token']['symbol'] self.msg_aggregator.add_warning( f'Ignoring {event_type} in yearn vaults V2 from {from_str} to ' f'{to_str} because the token is not recognized.', ) continue except KeyError as e: log.debug( f'Failed to extract token information from {event_type} event ' f'in yearn vaults v2 graph query. {str(e)}.', ) self.msg_aggregator.add_warning( f'Ignoring {event_type} {tx_hash} in yearn vault V2 Failed to decode' f' remote information. ', ) continue try: from_asset_usd_price = get_usd_price_zero_if_error( asset=from_asset, time=Timestamp(int(entry['timestamp']) // 1000), location=f'yearn vault v2 deposit {tx_hash}', msg_aggregator=self.msg_aggregator, ) to_asset_usd_price = get_usd_price_zero_if_error( asset=to_asset, time=Timestamp(int(entry['timestamp']) // 1000), location=f'yearn v2 vault deposit {tx_hash}', msg_aggregator=self.msg_aggregator, ) if event_type == 'deposit': from_asset_amount = token_normalized_value( token_amount=int(entry['tokenAmount']), token=from_asset, ) to_asset_amount = token_normalized_value( token_amount=int(entry['sharesMinted']), token=to_asset, ) elif event_type == 'withdraw': from_asset_amount = token_normalized_value( token_amount=int(entry['sharesBurnt']), token=from_asset, ) to_asset_amount = token_normalized_value( token_amount=int(entry['tokenAmount']), token=to_asset, ) result.append( YearnVaultEvent( event_type=event_type, block_number=int(entry['blockNumber']), timestamp=Timestamp(int(entry['timestamp']) // 1000), from_asset=from_asset, from_value=Balance( amount=from_asset_amount, usd_value=from_asset_amount * from_asset_usd_price, ), to_asset=to_asset, to_value=Balance( amount=to_asset_amount, usd_value=to_asset_amount * to_asset_usd_price, ), realized_pnl=None, tx_hash=tx_hash, log_index=int(log_index), version=2, )) except (KeyError, ValueError) as e: msg = str(e) if isinstance(e, KeyError): msg = f'Missing key entry for {msg}.' 
log.error( f'Failed to read {event_type} from yearn vaults v2 graph because the response' f' does not have the expected output.', error=msg, ) self.msg_aggregator.add_warning( f'Ignoring {event_type} {tx_hash} in yearn vault V2 from {from_asset} to ' f'{to_asset} because the remote information is not correct.', ) continue return result def get_all_events( self, addresses: List[EthAddress], from_block: int, to_block: int, ) -> Dict[ChecksumEthAddress, Dict[str, List[YearnVaultEvent]]]: param_types = { '$from_block': 'BigInt!', '$to_block': 'BigInt!', '$addresses': '[Bytes!]', } param_values = { 'from_block': from_block, 'to_block': to_block, 'addresses': addresses, } querystr = format_query_indentation(QUERY_USER_EVENTS.format()) query = self.graph.query( querystr=querystr, param_types=param_types, param_values=param_values, ) result: Dict[ChecksumEthAddress, Dict[str, List[YearnVaultEvent]]] = {} for account in query['accounts']: account_id = to_checksum_address(account['id']) result[account_id] = {} result[account_id]['deposits'] = self._process_event( account['deposits'], 'deposit') result[account_id]['withdrawals'] = self._process_event( account['withdrawals'], 'withdraw', ) return result
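# The yearn v2 subgraph returns composite ids and millisecond timestamps,
# which _process_event above unpacks with str.split and an integer division
# by 1000. A small sketch of just that parsing; `parse_yearn_v2_id` is an
# illustrative helper and the sample id below is hypothetical, showing the
# four '-'-separated parts the code expects with the tx hash and log index
# in the middle.
from typing import Tuple


def parse_yearn_v2_id(entry_id: str, timestamp_ms: str) -> Tuple[str, int, int]:
    """Return (tx_hash, log_index, timestamp in seconds)."""
    _, tx_hash, log_index, _ = entry_id.split('-')
    return tx_hash, int(log_index), int(timestamp_ms) // 1000


tx_hash, log_index, ts = parse_yearn_v2_id('1-0xabc-5-0', '1620000000000')
assert (tx_hash, log_index, ts) == ('0xabc', 5, 1620000000)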
class EthereumManager(): def __init__( self, ethrpc_endpoint: str, etherscan: Etherscan, database: DBHandler, msg_aggregator: MessagesAggregator, greenlet_manager: GreenletManager, connect_at_start: Sequence[NodeName], eth_rpc_timeout: int = DEFAULT_ETH_RPC_TIMEOUT, ) -> None: log.debug( f'Initializing Ethereum Manager with own rpc endpoint: {ethrpc_endpoint}' ) self.greenlet_manager = greenlet_manager self.web3_mapping: Dict[NodeName, Web3] = {} self.own_rpc_endpoint = ethrpc_endpoint self.etherscan = etherscan self.msg_aggregator = msg_aggregator self.eth_rpc_timeout = eth_rpc_timeout self.transactions = EthTransactions( database=database, etherscan=etherscan, msg_aggregator=msg_aggregator, ) for node in connect_at_start: self.greenlet_manager.spawn_and_track( after_seconds=None, task_name=f'Attempt connection to {str(node)} ethereum node', exception_is_error=True, method=self.attempt_connect, name=node, ethrpc_endpoint=node.endpoint(self.own_rpc_endpoint), mainnet_check=True, ) self.blocks_subgraph = Graph( 'https://api.thegraph.com/subgraphs/name/blocklytics/ethereum-blocks', ) def connected_to_any_web3(self) -> bool: return (NodeName.OWN in self.web3_mapping or NodeName.MYCRYPTO in self.web3_mapping or NodeName.BLOCKSCOUT in self.web3_mapping or NodeName.AVADO_POOL in self.web3_mapping) def default_call_order(self, skip_etherscan: bool = False) -> List[NodeName]: """Default call order for ethereum nodes Own node always has preference. Then all other node types are randomly queried in sequence depending on a weighted probability. Some benchmarks on weighted probability based random selection when compared to simple random selection. Benchmark was on blockchain balance querying with 29 ethereum accounts and at the time 1010 different ethereum tokens. 
With weights: etherscan: 0.5, mycrypto: 0.25, blockscout: 0.2, avado: 0.05 ===> Runs: 66, 58, 60, 68, 58 seconds ---> Average: 62 seconds - Without weights ===> Runs: 66, 82, 72, 58, 72 seconds ---> Average: 70 seconds """ result = [] if NodeName.OWN in self.web3_mapping: result.append(NodeName.OWN) selection = list(OPEN_NODES) if skip_etherscan: selection.remove(NodeName.ETHERSCAN) ordered_list = [] while len(selection) != 0: weights = [] for entry in selection: weights.append(OPEN_NODES_WEIGHT_MAP[entry]) node = random.choices(selection, weights, k=1) ordered_list.append(node[0]) selection.remove(node[0]) return result + ordered_list def attempt_connect( self, name: NodeName, ethrpc_endpoint: str, mainnet_check: bool = True, ) -> Tuple[bool, str]: """Attempt to connect to a particular node type For our own node if the given rpc endpoint is not the same as the saved one the connection is re-attempted to the new one """ message = '' node_connected = self.web3_mapping.get(name, None) is not None own_node_already_connected = (name == NodeName.OWN and self.own_rpc_endpoint == ethrpc_endpoint and node_connected) if own_node_already_connected or (node_connected and name != NodeName.OWN): return True, 'Already connected to an ethereum node' try: parsed_eth_rpc_endpoint = urlparse(ethrpc_endpoint) if not parsed_eth_rpc_endpoint.scheme: ethrpc_endpoint = f"http://{ethrpc_endpoint}" provider = HTTPProvider( endpoint_uri=ethrpc_endpoint, request_kwargs={'timeout': self.eth_rpc_timeout}, ) ens = ENS(provider) web3 = Web3(provider, ens=ens) web3.middleware_onion.inject(http_retry_request_middleware, layer=0) except requests.exceptions.RequestException: message = f'Failed to connect to ethereum node {name} at endpoint {ethrpc_endpoint}' log.warning(message) return False, message try: is_connected = web3.isConnected() except AssertionError: # Terrible, terrible hack but needed due to https://github.com/rotki/rotki/issues/1817 is_connected = False if is_connected: # Also make sure we are actually connected to the Ethereum mainnet synchronized = True msg = '' try: if mainnet_check: network_id = int(web3.net.version) if network_id != 1: message = ( f'Connected to ethereum node {name} at endpoint {ethrpc_endpoint} but ' f'it is not on the ethereum mainnet. The chain id ' f'the node is in is {network_id}.') log.warning(message) return False, message current_block = web3.eth.block_number # pylint: disable=no-member try: latest_block = self.query_eth_highest_block() except RemoteError: msg = 'Could not query latest block' log.warning(msg) synchronized = False else: synchronized, msg = _is_synchronized( current_block, latest_block) except ValueError as e: message = ( f'Failed to connect to ethereum node {name} at endpoint ' f'{ethrpc_endpoint} due to {str(e)}') return False, message if not synchronized: self.msg_aggregator.add_warning( f'We could not verify that ethereum node {name} is ' 'synchronized with the ethereum mainnet. 
Balances and other queries '
                    'may be incorrect.',
                )

            log.info(f'Connected ethereum node {name} at {ethrpc_endpoint}')
            self.web3_mapping[name] = web3
            return True, ''

        # else
        message = f'Failed to connect to ethereum node {name} at endpoint {ethrpc_endpoint}'
        log.warning(message)
        return False, message

    def set_rpc_endpoint(self, endpoint: str) -> Tuple[bool, str]:
        """ Attempts to set the RPC endpoint for the user's own ethereum node

        Returns a tuple (result, message)
            - result: Boolean for success or failure of changing the rpc endpoint
            - message: A message containing information on what happened. Can
              be populated both in case of success or failure"""
        if endpoint == '':
            self.web3_mapping.pop(NodeName.OWN, None)
            self.own_rpc_endpoint = ''
            return True, ''

        # else
        result, message = self.attempt_connect(name=NodeName.OWN, ethrpc_endpoint=endpoint)
        if result:
            log.info('Setting own node ETH RPC endpoint', endpoint=endpoint)
            self.own_rpc_endpoint = endpoint
        return result, message

    def query(self, method: Callable, call_order: Sequence[NodeName], **kwargs: Any) -> Any:
        """Queries ethereum related data by performing the provided method on all given nodes

        The first node in the call order that gets a successful response returns.
        If none get a result then a remote error is raised
        """
        for node in call_order:
            web3 = self.web3_mapping.get(node, None)
            if web3 is None and node != NodeName.ETHERSCAN:
                continue

            try:
                result = method(web3, **kwargs)
            except (
                    RemoteError,
                    BlockchainQueryError,
                    requests.exceptions.RequestException,
            ) as e:
                log.warning(f'Failed to query {node} for {str(method)} due to {str(e)}')
                # Catch all possible errors here and just try next node call
                continue

            return result

        # no node in the call order list was successfully queried
        raise RemoteError(
            f'Failed to query {str(method)} after trying the following '
            f'nodes: {[str(x) for x in call_order]}. 
Check logs for details.', ) def _get_latest_block_number(self, web3: Optional[Web3]) -> int: if web3 is not None: return web3.eth.block_number # else return self.etherscan.get_latest_block_number() def get_latest_block_number(self, call_order: Optional[Sequence[NodeName]] = None ) -> int: return self.query( method=self._get_latest_block_number, call_order=call_order if call_order is not None else self.default_call_order(), ) def query_eth_highest_block(self) -> BlockNumber: """ Attempts to query an external service for the block height Returns the highest blockNumber May Raise RemoteError if querying fails """ url = 'https://api.blockcypher.com/v1/eth/main' log.debug('Querying blockcypher for ETH highest block', url=url) eth_resp: Optional[Dict[str, str]] try: eth_resp = request_get_dict(url) except (RemoteError, UnableToDecryptRemoteData, requests.exceptions.RequestException): eth_resp = None block_number: Optional[int] if eth_resp and 'height' in eth_resp: block_number = int(eth_resp['height']) log.debug('ETH highest block result', block=block_number) else: block_number = self.etherscan.get_latest_block_number() log.debug('ETH highest block result', block=block_number) return BlockNumber(block_number) def get_eth_balance(self, account: ChecksumEthAddress) -> FVal: """Gets the balance of the given account in ETH May raise: - RemoteError if Etherscan is used and there is a problem querying it or parsing its response """ result = self.get_multieth_balance([account]) return result[account] def get_multieth_balance( self, accounts: List[ChecksumEthAddress], call_order: Optional[Sequence[NodeName]] = None, ) -> Dict[ChecksumEthAddress, FVal]: """Returns a dict with keys being accounts and balances in ETH May raise: - RemoteError if an external service such as Etherscan is queried and there is a problem with its query. """ balances: Dict[ChecksumEthAddress, FVal] = {} log.debug( 'Querying ethereum chain for ETH balance', eth_addresses=accounts, ) result = ETH_SCAN.call( ethereum=self, method_name='etherBalances', arguments=[accounts], call_order=call_order if call_order is not None else self.default_call_order(), ) balances = {} for idx, account in enumerate(accounts): balances[account] = from_wei(result[idx]) return balances def get_block_by_number( self, num: int, call_order: Optional[Sequence[NodeName]] = None, ) -> Dict[str, Any]: return self.query( method=self._get_block_by_number, call_order=call_order if call_order is not None else self.default_call_order(), num=num, ) def _get_block_by_number(self, web3: Optional[Web3], num: int) -> Dict[str, Any]: """Returns the block object corresponding to the given block number May raise: - RemoteError if an external service such as Etherscan is queried and there is a problem with its query. 
""" if web3 is None: return self.etherscan.get_block_by_number(num) block_data: MutableAttributeDict = MutableAttributeDict( web3.eth.get_block(num)) # type: ignore # pylint: disable=no-member # noqa: E501 block_data['hash'] = hex_or_bytes_to_str(block_data['hash']) return dict(block_data) def get_code( self, account: ChecksumEthAddress, call_order: Optional[Sequence[NodeName]] = None, ) -> str: return self.query( method=self._get_code, call_order=call_order if call_order is not None else self.default_call_order(), account=account, ) def _get_code(self, web3: Optional[Web3], account: ChecksumEthAddress) -> str: """Gets the deployment bytecode at the given address May raise: - RemoteError if Etherscan is used and there is a problem querying it or parsing its response """ if web3 is None: return self.etherscan.get_code(account) return hex_or_bytes_to_str(web3.eth.getCode(account)) @overload def ens_lookup( self, name: str, blockchain: Literal[ SupportedBlockchain.ETHEREUM] = SupportedBlockchain.ETHEREUM, call_order: Optional[Sequence[NodeName]] = None, ) -> Optional[ChecksumEthAddress]: ... @overload def ens_lookup( self, name: str, blockchain: Literal[SupportedBlockchain.BITCOIN, SupportedBlockchain.KUSAMA, ], call_order: Optional[Sequence[NodeName]] = None, ) -> Optional[HexStr]: ... def ens_lookup( self, name: str, blockchain: SupportedBlockchain = SupportedBlockchain.ETHEREUM, call_order: Optional[Sequence[NodeName]] = None, ) -> Optional[Union[ChecksumEthAddress, HexStr]]: return self.query( method=self._ens_lookup, call_order=call_order if call_order is not None else self.default_call_order(), name=name, blockchain=blockchain, ) @overload def _ens_lookup( self, web3: Optional[Web3], name: str, blockchain: Literal[SupportedBlockchain.ETHEREUM], ) -> Optional[ChecksumEthAddress]: ... @overload def _ens_lookup( self, web3: Optional[Web3], name: str, blockchain: Literal[SupportedBlockchain.BITCOIN, SupportedBlockchain.KUSAMA, ], ) -> Optional[HexStr]: ... def _ens_lookup( self, web3: Optional[Web3], name: str, blockchain: SupportedBlockchain = SupportedBlockchain.ETHEREUM, ) -> Optional[Union[ChecksumEthAddress, HexStr]]: """Performs an ENS lookup and returns address if found else None TODO: currently web3.py 5.15.0 does not support multichain ENS domains (EIP-2304), therefore requesting a non-Ethereum address won't use the web3 ens library and will require to extend the library resolver ABI. An issue in their repo (#1839) reporting the lack of support has been created. This function will require refactoring once they include support for EIP-2304. 
https://github.com/ethereum/web3.py/issues/1839 May raise: - RemoteError if Etherscan is used and there is a problem querying it or parsing its response - InputError if the given name is not a valid ENS name """ try: normal_name = normalize_name(name) except InvalidName as e: raise InputError(str(e)) from e resolver_addr = self._call_contract( web3=web3, contract_address=ENS_MAINNET_ADDR, abi=ENS_ABI, method_name='resolver', arguments=[normal_name_to_hash(normal_name)], ) if is_none_or_zero_address(resolver_addr): return None ens_resolver_abi = ENS_RESOLVER_ABI.copy() arguments = [normal_name_to_hash(normal_name)] if blockchain != SupportedBlockchain.ETHEREUM: ens_resolver_abi.extend(ENS_RESOLVER_ABI_MULTICHAIN_ADDRESS) arguments.append(blockchain.ens_coin_type()) try: deserialized_resolver_addr = deserialize_ethereum_address( resolver_addr) except DeserializationError: log.error( f'Error deserializing address {resolver_addr} while doing' f'ens lookup', ) return None address = self._call_contract( web3=web3, contract_address=deserialized_resolver_addr, abi=ens_resolver_abi, method_name='addr', arguments=arguments, ) if is_none_or_zero_address(address): return None if blockchain != SupportedBlockchain.ETHEREUM: return HexStr(address.hex()) try: return deserialize_ethereum_address(address) except DeserializationError: log.error(f'Error deserializing address {address}') return None def _call_contract_etherscan( self, contract_address: ChecksumEthAddress, abi: List, method_name: str, arguments: Optional[List[Any]] = None, ) -> Any: """Performs an eth_call to an ethereum contract via etherscan May raise: - RemoteError if there is a problem with reaching etherscan or with the returned result """ web3 = Web3() contract = web3.eth.contract(address=contract_address, abi=abi) input_data = contract.encodeABI(method_name, args=arguments if arguments else []) result = self.etherscan.eth_call( to_address=contract_address, input_data=input_data, ) if result == '0x': raise BlockchainQueryError( f'Error doing call on contract {contract_address} for {method_name} ' f'with arguments: {str(arguments)} via etherscan. 
Returned 0x result', ) fn_abi = contract._find_matching_fn_abi( fn_identifier=method_name, args=arguments, ) output_types = get_abi_output_types(fn_abi) output_data = web3.codec.decode_abi(output_types, bytes.fromhex(result[2:])) if len(output_data) == 1: # due to https://github.com/PyCQA/pylint/issues/4114 return output_data[0] # pylint: disable=unsubscriptable-object return output_data def _get_transaction_receipt( self, web3: Optional[Web3], tx_hash: str, ) -> Dict[str, Any]: if web3 is None: tx_receipt = self.etherscan.get_transaction_receipt(tx_hash) try: # Turn hex numbers to int block_number = int(tx_receipt['blockNumber'], 16) tx_receipt['blockNumber'] = block_number tx_receipt['cumulativeGasUsed'] = int( tx_receipt['cumulativeGasUsed'], 16) tx_receipt['gasUsed'] = int(tx_receipt['gasUsed'], 16) tx_receipt['status'] = int(tx_receipt['status'], 16) tx_index = int(tx_receipt['transactionIndex'], 16) tx_receipt['transactionIndex'] = tx_index for receipt_log in tx_receipt['logs']: receipt_log['blockNumber'] = block_number receipt_log['logIndex'] = deserialize_int_from_hex( symbol=receipt_log['logIndex'], location='etherscan tx receipt', ) receipt_log['transactionIndex'] = tx_index except (DeserializationError, ValueError) as e: raise RemoteError( f'Couldnt deserialize transaction receipt data from etherscan {tx_receipt}', ) from e return tx_receipt tx_receipt = web3.eth.get_transaction_receipt(tx_hash) # type: ignore return process_result(tx_receipt) def get_transaction_receipt( self, tx_hash: str, call_order: Optional[Sequence[NodeName]] = None, ) -> Dict[str, Any]: return self.query( method=self._get_transaction_receipt, call_order=call_order if call_order is not None else self.default_call_order(), tx_hash=tx_hash, ) def call_contract( self, contract_address: ChecksumEthAddress, abi: List, method_name: str, arguments: Optional[List[Any]] = None, call_order: Optional[Sequence[NodeName]] = None, ) -> Any: return self.query( method=self._call_contract, call_order=call_order if call_order is not None else self.default_call_order(), contract_address=contract_address, abi=abi, method_name=method_name, arguments=arguments, ) def _call_contract( self, web3: Optional[Web3], contract_address: ChecksumEthAddress, abi: List, method_name: str, arguments: Optional[List[Any]] = None, ) -> Any: """Performs an eth_call to an ethereum contract May raise: - RemoteError if etherscan is used and there is a problem with reaching it or with the returned result - BlockchainQueryError if web3 is used and there is a VM execution error """ if web3 is None: return self._call_contract_etherscan( contract_address=contract_address, abi=abi, method_name=method_name, arguments=arguments, ) contract = web3.eth.contract(address=contract_address, abi=abi) try: method = getattr(contract.caller, method_name) result = method(*arguments if arguments else []) except (ValueError, BadFunctionCallOutput) as e: raise BlockchainQueryError( f'Error doing call on contract {contract_address}: {str(e)}', ) from e return result def get_logs( self, contract_address: ChecksumEthAddress, abi: List, event_name: str, argument_filters: Dict[str, Any], from_block: int, to_block: Union[int, Literal['latest']] = 'latest', call_order: Optional[Sequence[NodeName]] = None, ) -> List[Dict[str, Any]]: if call_order is None: # Default call order for logs call_order = (NodeName.OWN, NodeName.ETHERSCAN) return self.query( method=self._get_logs, call_order=call_order, contract_address=contract_address, abi=abi, event_name=event_name, 
argument_filters=argument_filters,
            from_block=from_block,
            to_block=to_block,
        )

    def _get_logs(
            self,
            web3: Optional[Web3],
            contract_address: ChecksumEthAddress,
            abi: List,
            event_name: str,
            argument_filters: Dict[str, Any],
            from_block: int,
            to_block: Union[int, Literal['latest']] = 'latest',
    ) -> List[Dict[str, Any]]:
        """Queries logs of an ethereum contract

        May raise:
        - RemoteError if etherscan is used and there is a problem with
        reaching it or with the returned result
        """
        event_abi = find_matching_event_abi(abi=abi, event_name=event_name)
        _, filter_args = construct_event_filter_params(
            event_abi=event_abi,
            abi_codec=Web3().codec,
            contract_address=contract_address,
            argument_filters=argument_filters,
            fromBlock=from_block,
            toBlock=to_block,
        )
        if event_abi['anonymous']:
            # web3.py does not handle the anonymous events correctly and adds the first topic
            filter_args['topics'] = filter_args['topics'][1:]
        events: List[Dict[str, Any]] = []
        start_block = from_block
        if web3 is not None:
            events = _query_web3_get_logs(
                web3=web3,
                filter_args=filter_args,
                from_block=from_block,
                to_block=to_block,
                contract_address=contract_address,
                event_name=event_name,
                argument_filters=argument_filters,
            )
        else:  # etherscan
            until_block = (
                self.etherscan.get_latest_block_number() if to_block == 'latest' else to_block
            )
            blocks_step = 300000
            while start_block <= until_block:
                while True:  # loop to continuously reduce block range if need be
                    end_block = min(start_block + blocks_step, until_block)
                    try:
                        new_events = self.etherscan.get_logs(
                            contract_address=contract_address,
                            topics=filter_args['topics'],  # type: ignore
                            from_block=start_block,
                            to_block=end_block,
                        )
                    except RemoteError as e:
                        if 'Please select a smaller result dataset' in str(e):
                            blocks_step = blocks_step // 2
                            if blocks_step < 100:
                                raise  # stop trying
                            # else try with the smaller step
                            continue

                        # else some other error
                        raise

                    break  # we must have a result

                # Turn all Hex ints to ints
                for e_idx, event in enumerate(new_events):
                    try:
                        block_number = deserialize_int_from_hex(
                            symbol=event['blockNumber'],
                            location='etherscan log query',
                        )
                        log_index = deserialize_int_from_hex(
                            symbol=event['logIndex'],
                            location='etherscan log query',
                        )
                        # Try to see if the event is a duplicate that got returned
                        # in the previous iteration
                        for previous_event in reversed(events):
                            if previous_event['blockNumber'] < block_number:
                                break

                            same_event = (
                                previous_event['logIndex'] == log_index and
                                previous_event['transactionHash'] == event['transactionHash']
                            )
                            if same_event:
                                events.pop()

                        new_events[e_idx]['address'] = deserialize_ethereum_address(
                            event['address'],
                        )
                        new_events[e_idx]['blockNumber'] = block_number
                        new_events[e_idx]['timeStamp'] = deserialize_int_from_hex(
                            symbol=event['timeStamp'],
                            location='etherscan log query',
                        )
                        new_events[e_idx]['gasPrice'] = deserialize_int_from_hex(
                            symbol=event['gasPrice'],
                            location='etherscan log query',
                        )
                        new_events[e_idx]['gasUsed'] = deserialize_int_from_hex(
                            symbol=event['gasUsed'],
                            location='etherscan log query',
                        )
                        new_events[e_idx]['logIndex'] = log_index
                        new_events[e_idx]['transactionIndex'] = deserialize_int_from_hex(
                            symbol=event['transactionIndex'],
                            location='etherscan log query',
                        )
                    except DeserializationError as e:
                        raise RemoteError(
                            f'Could not decode an etherscan event due to {str(e)}',
                        ) from e

                # etherscan will only return 1000 events in one go. 
If more than 1000 # are returned such as when no filter args are provided then continue # the query from the last block if len(new_events) == 1000: start_block = new_events[-1]['blockNumber'] else: start_block = end_block + 1 events.extend(new_events) return events def get_event_timestamp(self, event: Dict[str, Any]) -> Timestamp: """Reads an event returned either by etherscan or web3 and gets its timestamp Etherscan events contain a timestamp. Normal web3 events don't so it needs to be queried from the block number WE could also add this to the get_logs() call but would add unnecessary rpc calls for get_block_by_number() for each log entry. Better have it lazy queried like this. TODO: Perhaps better approach would be a log event class for this """ if 'timeStamp' in event: # event from etherscan return Timestamp(event['timeStamp']) # event from web3 block_number = event['blockNumber'] block_data = self.get_block_by_number(block_number) return Timestamp(block_data['timestamp']) def _get_blocknumber_by_time_from_subgraph(self, ts: Timestamp) -> int: """Queries Ethereum Blocks Subgraph for closest block at or before given timestamp""" response = self.blocks_subgraph.query( f""" {{ blocks( first: 1, orderBy: timestamp, orderDirection: desc, where: {{timestamp_lte: "{ts}"}} ) {{ id number timestamp }} }} """, ) try: result = int(response['blocks'][0]['number']) except (IndexError, KeyError) as e: raise RemoteError( f'Got unexpected ethereum blocks subgraph response: {response}', ) from e else: return result def get_blocknumber_by_time(self, ts: Timestamp, etherscan: bool = True) -> int: """Searches for the blocknumber of a specific timestamp - Performs the etherscan api call by default first - If RemoteError raised or etherscan flag set to false -> queries blocks subgraph """ if etherscan: try: return self.etherscan.get_blocknumber_by_time(ts) except RemoteError: pass return self._get_blocknumber_by_time_from_subgraph(ts) def get_basic_contract_info(self, address: ChecksumEthAddress) -> Dict[str, Any]: """ Query a contract address and return basic information as: - Decimals - name - symbol if it is provided in the contract. This method may raise: - BadFunctionCallOutput: If there is an error calling a bad address """ properties = ('decimals', 'symbol', 'name') info: Dict[str, Any] = {} contract = EthereumContract(address=address, abi=ERC20TOKEN_ABI, deployed_block=0) try: # Output contains call status and result output = multicall_2( ethereum=self, require_success=False, calls=[(address, contract.encode(method_name=prop)) for prop in properties], ) except RemoteError: # If something happens in the connection the output should have # the same length as the tuple of properties output = [(False, b'')] * len(properties) decoded = [ contract.decode(x[1], method_name)[0] # pylint: disable=E1136 if x[0] and len(x[1]) else None for (x, method_name) in zip(output, properties) ] for prop, value in zip(properties, decoded): info[prop] = value return info
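# default_call_order above randomizes the open nodes by repeatedly drawing
# one with random.choices and removing it from the pool, so higher-weight
# nodes tend to be tried earlier without being guaranteed first. A
# self-contained sketch of that weighted-order-without-replacement idea;
# `weighted_order` and the node names/weights below are illustrative only.
import random
from typing import Dict, List


def weighted_order(weight_map: Dict[str, float]) -> List[str]:
    """Return all nodes in a random order biased by their weights."""
    selection = list(weight_map)
    ordered: List[str] = []
    while selection:
        weights = [weight_map[node] for node in selection]
        node = random.choices(selection, weights=weights, k=1)[0]
        ordered.append(node)
        selection.remove(node)
    return ordered


# Example run; 'etherscan' usually (but not always) comes first.
print(weighted_order({'etherscan': 0.5, 'mycrypto': 0.25, 'blockscout': 0.2, 'avado': 0.05}))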
class Uniswap(EthereumModule): """Uniswap integration module * Uniswap subgraph: https://github.com/Uniswap/uniswap-v2-subgraph """ def __init__( self, ethereum_manager: 'EthereumManager', database: 'DBHandler', premium: Optional[Premium], msg_aggregator: MessagesAggregator, ) -> None: self.ethereum = ethereum_manager self.database = database self.premium = premium self.msg_aggregator = msg_aggregator self.data_directory = database.user_data_dir.parent self.trades_lock = Semaphore() try: self.graph = Graph( 'https://api.thegraph.com/subgraphs/name/uniswap/uniswap-v2', ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e))) raise ModuleInitializationFailure('subgraph remote error') from e @staticmethod def _calculate_events_balances( address: ChecksumEthAddress, events: List[LiquidityPoolEvent], balances: List[LiquidityPool], ) -> List[LiquidityPoolEventsBalance]: """Given an address, its LP events and the current LPs participating in (`balances`), process each event (grouped by pool) aggregating the token0, token1 and USD amounts for calculating the profit/loss in the pool. Finally return a list of <LiquidityPoolEventsBalance>, where each contains the profit/loss and events per pool. If `balances` is empty that means either the address does not have balances in the protocol or the endpoint has been called with a specific time range. """ events_balances: List[LiquidityPoolEventsBalance] = [] pool_balance: Dict[ChecksumEthAddress, LiquidityPool] = ({ pool.address: pool for pool in balances }) pool_aggregated_amount: Dict[ChecksumEthAddress, AggregatedAmount] = {} # Populate `pool_aggregated_amount` dict, being the keys the pools' # addresses and the values the aggregated amounts from their events for event in events: pool = event.pool_address if pool not in pool_aggregated_amount: pool_aggregated_amount[pool] = AggregatedAmount() pool_aggregated_amount[pool].events.append(event) if event.event_type == EventType.MINT: pool_aggregated_amount[pool].profit_loss0 -= event.amount0 pool_aggregated_amount[pool].profit_loss1 -= event.amount1 pool_aggregated_amount[pool].usd_profit_loss -= event.usd_price else: # event_type == EventType.BURN pool_aggregated_amount[pool].profit_loss0 += event.amount0 pool_aggregated_amount[pool].profit_loss1 += event.amount1 pool_aggregated_amount[pool].usd_profit_loss += event.usd_price # Instantiate `LiquidityPoolEventsBalance` per pool using # `pool_aggregated_amount`. If `pool_balance` exists (all events case), # factorise in the current pool balances in the totals. 
for pool, aggregated_amount in pool_aggregated_amount.items(): profit_loss0 = aggregated_amount.profit_loss0 profit_loss1 = aggregated_amount.profit_loss1 usd_profit_loss = aggregated_amount.usd_profit_loss # Add current pool balances looking up the pool if pool in pool_balance: token0 = pool_balance[pool].assets[0].asset token1 = pool_balance[pool].assets[1].asset profit_loss0 += pool_balance[pool].assets[ 0].user_balance.amount profit_loss1 += pool_balance[pool].assets[ 1].user_balance.amount usd_profit_loss += pool_balance[pool].user_balance.usd_value else: # NB: get `token0` and `token1` from any pool event token0 = aggregated_amount.events[0].token0 token1 = aggregated_amount.events[0].token1 events_balance = LiquidityPoolEventsBalance( address=address, pool_address=pool, token0=token0, token1=token1, events=aggregated_amount.events, profit_loss0=profit_loss0, profit_loss1=profit_loss1, usd_profit_loss=usd_profit_loss, ) events_balances.append(events_balance) return events_balances def _get_balances_graph( self, addresses: List[ChecksumEthAddress], ) -> ProtocolBalance: """Get the addresses' pools data querying the Uniswap subgraph Each liquidity position is converted into a <LiquidityPool>. """ address_balances: DDAddressBalances = defaultdict(list) known_assets: Set[EthereumToken] = set() unknown_assets: Set[UnknownEthereumToken] = set() addresses_lower = [address.lower() for address in addresses] querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format()) param_types = { '$limit': 'Int!', '$offset': 'Int!', '$addresses': '[String!]', '$balance': 'BigDecimal!', } param_values = { 'limit': GRAPH_QUERY_LIMIT, 'offset': 0, 'addresses': addresses_lower, 'balance': '0', } while True: try: result = self.graph.query( querystr=querystr, param_types=param_types, param_values=param_values, ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e))) raise result_data = result['liquidityPositions'] for lp in result_data: user_address = to_checksum_address(lp['user']['id']) user_lp_balance = FVal(lp['liquidityTokenBalance']) lp_pair = lp['pair'] lp_address = to_checksum_address(lp_pair['id']) lp_total_supply = FVal(lp_pair['totalSupply']) # Insert LP tokens reserves within tokens dicts token0 = lp_pair['token0'] token0['total_amount'] = lp_pair['reserve0'] token1 = lp_pair['token1'] token1['total_amount'] = lp_pair['reserve1'] liquidity_pool_assets = [] for token in token0, token1: # Get the token <EthereumToken> or <UnknownEthereumToken> asset = get_ethereum_token( symbol=token['symbol'], ethereum_address=to_checksum_address(token['id']), name=token['name'], decimals=int(token['decimals']), ) # Classify the asset either as known or unknown if isinstance(asset, EthereumToken): known_assets.add(asset) elif isinstance(asset, UnknownEthereumToken): unknown_assets.add(asset) # Estimate the underlying asset total_amount asset_total_amount = FVal(token['total_amount']) user_asset_balance = (user_lp_balance / lp_total_supply * asset_total_amount) liquidity_pool_asset = LiquidityPoolAsset( asset=asset, total_amount=asset_total_amount, user_balance=Balance(amount=user_asset_balance), ) liquidity_pool_assets.append(liquidity_pool_asset) liquidity_pool = LiquidityPool( address=lp_address, assets=liquidity_pool_assets, total_supply=lp_total_supply, user_balance=Balance(amount=user_lp_balance), ) address_balances[user_address].append(liquidity_pool) # Check whether an extra request is needed if len(result_data) < GRAPH_QUERY_LIMIT: break # Update 
pagination step param_values = { **param_values, 'offset': param_values['offset'] + GRAPH_QUERY_LIMIT, # type: ignore } protocol_balance = ProtocolBalance( address_balances=dict(address_balances), known_assets=known_assets, unknown_assets=unknown_assets, ) return protocol_balance def get_balances_chain( self, addresses: List[ChecksumEthAddress]) -> ProtocolBalance: """Get the addresses' pools data via chain queries. """ known_assets: Set[EthereumToken] = set() unknown_assets: Set[UnknownEthereumToken] = set() lp_addresses = get_latest_lp_addresses(self.data_directory) address_mapping = {} for address in addresses: pool_balances = uniswap_lp_token_balances( address=address, ethereum=self.ethereum, lp_addresses=lp_addresses, known_assets=known_assets, unknown_assets=unknown_assets, ) if len(pool_balances) != 0: address_mapping[address] = pool_balances protocol_balance = ProtocolBalance( address_balances=address_mapping, known_assets=known_assets, unknown_assets=unknown_assets, ) return protocol_balance @staticmethod def _get_known_asset_price( known_assets: Set[EthereumToken], unknown_assets: Set[UnknownEthereumToken], ) -> AssetPrice: """Get the tokens prices via Inquirer Given an asset, if `find_usd_price()` returns zero, it will be added into `unknown_assets`. """ asset_price: AssetPrice = {} for known_asset in known_assets: asset_usd_price = Inquirer().find_usd_price(known_asset) if asset_usd_price != Price(ZERO): asset_price[known_asset.ethereum_address] = asset_usd_price else: unknown_asset = UnknownEthereumToken( ethereum_address=known_asset.ethereum_address, symbol=known_asset.identifier, name=known_asset.name, decimals=known_asset.decimals, ) unknown_assets.add(unknown_asset) return asset_price @staticmethod def _tx_swaps_to_trades(swaps: List[AMMSwap]) -> List[AMMTrade]: """ Turns a list of a transaction's swaps into a list of trades, taking into account the first and last swaps only for use with the rest of the rotki accounting. TODO: This is not nice, but we are constrained by the 1 token in 1 token out concept of a trade we have right now. So if in a swap we have both tokens in we will create two trades, with the final amount being divided between the 2 trades. This is only so that the AMM trade can be processed easily in our current trades accounting. 
Make issue to process this properly as multitrades when we change the trade format """ trades: List[AMMTrade] = [] both_in = False both_out = False if swaps[0].amount0_in > ZERO and swaps[0].amount1_in > ZERO: both_in = True if swaps[-1].amount0_out > ZERO and swaps[-1].amount1_out > ZERO: both_out = True if both_in: quote_assets = [ (swaps[0].token0, swaps[0].amount0_in if not both_out else swaps[0].amount0_in / 2), # noqa: E501 (swaps[0].token1, swaps[0].amount1_in if not both_out else swaps[0].amount1_in / 2), # noqa: E501 ] elif swaps[0].amount0_in > ZERO: quote_assets = [(swaps[0].token0, swaps[0].amount0_in)] else: quote_assets = [(swaps[0].token1, swaps[0].amount1_in)] trade_index = 0 if swaps[-1].amount0_out > ZERO: trades = add_trades_from_swaps( swaps=swaps, trades=trades, both_in=both_in, quote_assets=quote_assets, token_amount=swaps[-1].amount0_out, token=swaps[-1].token0, trade_index=trade_index, ) trade_index += len(trades) if swaps[-1].amount1_out > ZERO: trades = add_trades_from_swaps( swaps=swaps, trades=trades, both_in=both_in, quote_assets=quote_assets, token_amount=swaps[-1].amount1_out, token=swaps[-1].token1, trade_index=trade_index, ) return trades def _get_events_balances( self, addresses: List[ChecksumEthAddress], from_timestamp: Timestamp, to_timestamp: Timestamp, ) -> AddressEventsBalances: """Request via graph all events for new addresses and the latest ones for already existing addresses. Then the requested events are written in DB and finally all DB events are read, and processed for calculating total profit/loss per LP (stored within <LiquidityPoolEventsBalance>). """ address_events_balances: AddressEventsBalances = {} address_events: DDAddressEvents = defaultdict(list) db_address_events: AddressEvents = {} new_addresses: List[ChecksumEthAddress] = [] existing_addresses: List[ChecksumEthAddress] = [] min_end_ts: Timestamp = to_timestamp # Get addresses' last used query range for Uniswap events for address in addresses: entry_name = f'{UNISWAP_EVENTS_PREFIX}_{address}' events_range = self.database.get_used_query_range(name=entry_name) if not events_range: new_addresses.append(address) else: existing_addresses.append(address) min_end_ts = min(min_end_ts, events_range[1]) # Request new addresses' events if new_addresses: start_ts = Timestamp(0) for address in new_addresses: for event_type in EventType: new_address_events = self._get_events_graph( address=address, start_ts=start_ts, end_ts=to_timestamp, event_type=event_type, ) if new_address_events: address_events[address].extend(new_address_events) # Insert new address' last used query range self.database.update_used_query_range( name=f'{UNISWAP_EVENTS_PREFIX}_{address}', start_ts=start_ts, end_ts=to_timestamp, ) # Request existing DB addresses' events if existing_addresses and to_timestamp > min_end_ts: for address in existing_addresses: for event_type in EventType: address_new_events = self._get_events_graph( address=address, start_ts=min_end_ts, end_ts=to_timestamp, event_type=event_type, ) if address_new_events: address_events[address].extend(address_new_events) # Update existing address' last used query range self.database.update_used_query_range( name=f'{UNISWAP_EVENTS_PREFIX}_{address}', start_ts=min_end_ts, end_ts=to_timestamp, ) # Insert requested events in DB all_events = [] for address in filter(lambda address: address in address_events, addresses): all_events.extend(address_events[address]) self.database.add_uniswap_events(all_events) # Fetch all DB events within the time range for address in 
    def _get_events_balances(
            self,
            addresses: List[ChecksumEthAddress],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> AddressEventsBalances:
        """Request via graph all events for new addresses and the latest ones
        for already existing addresses. Then the requested events are written
        to the DB, and finally all DB events are read and processed to
        calculate the total profit/loss per LP (stored within
        <LiquidityPoolEventsBalance>).
        """
        address_events_balances: AddressEventsBalances = {}
        address_events: DDAddressEvents = defaultdict(list)
        db_address_events: AddressEvents = {}
        new_addresses: List[ChecksumEthAddress] = []
        existing_addresses: List[ChecksumEthAddress] = []
        min_end_ts: Timestamp = to_timestamp

        # Get addresses' last used query range for Uniswap events
        for address in addresses:
            entry_name = f'{UNISWAP_EVENTS_PREFIX}_{address}'
            events_range = self.database.get_used_query_range(name=entry_name)

            if not events_range:
                new_addresses.append(address)
            else:
                existing_addresses.append(address)
                min_end_ts = min(min_end_ts, events_range[1])

        # Request new addresses' events
        if new_addresses:
            start_ts = Timestamp(0)
            for address in new_addresses:
                for event_type in EventType:
                    new_address_events = self._get_events_graph(
                        address=address,
                        start_ts=start_ts,
                        end_ts=to_timestamp,
                        event_type=event_type,
                    )
                    if new_address_events:
                        address_events[address].extend(new_address_events)

                # Insert the new address' last used query range
                self.database.update_used_query_range(
                    name=f'{UNISWAP_EVENTS_PREFIX}_{address}',
                    start_ts=start_ts,
                    end_ts=to_timestamp,
                )

        # Request existing DB addresses' events
        if existing_addresses and to_timestamp > min_end_ts:
            for address in existing_addresses:
                for event_type in EventType:
                    address_new_events = self._get_events_graph(
                        address=address,
                        start_ts=min_end_ts,
                        end_ts=to_timestamp,
                        event_type=event_type,
                    )
                    if address_new_events:
                        address_events[address].extend(address_new_events)

                # Update the existing address' last used query range
                self.database.update_used_query_range(
                    name=f'{UNISWAP_EVENTS_PREFIX}_{address}',
                    start_ts=min_end_ts,
                    end_ts=to_timestamp,
                )

        # Insert the requested events in the DB
        all_events = []
        for address in filter(lambda address: address in address_events, addresses):
            all_events.extend(address_events[address])
        self.database.add_uniswap_events(all_events)

        # Fetch all DB events within the time range
        for address in addresses:
            db_events = self.database.get_uniswap_events(
                from_ts=from_timestamp,
                to_ts=to_timestamp,
                address=address,
            )
            if db_events:
                # return events with the oldest first
                db_events.sort(key=lambda event: (event.timestamp, event.log_index))
                db_address_events[address] = db_events

        # Request addresses' current balances (UNI-V2s and underlying tokens)
        # if there is no specific time range in this endpoint call (i.e. all
        # events). Current balances in the protocol are needed for an accurate
        # profit/loss calculation.
        # TODO: when this endpoint is called with a specific time range,
        # getting the balances and underlying tokens within that time range
        # requires an archive node. Feature pending to be developed.
        address_balances: AddressBalances = {}  # Empty when a specific time range is given
        if from_timestamp == Timestamp(0):
            address_balances = self.get_balances(addresses)

        # Calculate addresses' event balances (i.e. profit/loss per pool)
        for address, events in db_address_events.items():
            balances = address_balances.get(address, [])  # Empty when a specific time range is given  # noqa: E501
            events_balances = self._calculate_events_balances(
                address=address,
                events=events,
                balances=balances,
            )
            address_events_balances[address] = events_balances

        return address_events_balances
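    # Editor's sketch of the used-query-range bookkeeping above (hypothetical
    # names): first-seen addresses are queried from genesis (Timestamp(0)),
    # while existing ones share the oldest recorded end timestamp, so a single
    # lagging address widens the re-query window for all of them:
    #
    #     min_end_ts = to_timestamp
    #     for addr in existing_addresses:
    #         events_range = db.get_used_query_range(f'{UNISWAP_EVENTS_PREFIX}_{addr}')
    #         min_end_ts = min(min_end_ts, events_range[1])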
    def _get_events_graph(
            self,
            address: ChecksumEthAddress,
            start_ts: Timestamp,
            end_ts: Timestamp,
            event_type: EventType,
    ) -> List[LiquidityPoolEvent]:
        """Get the address' events (mints & burns) by querying the Uniswap
        subgraph. Each event is stored in a <LiquidityPoolEvent>.
        """
        address_events: List[LiquidityPoolEvent] = []
        if event_type == EventType.MINT:
            query = MINTS_QUERY
            query_schema = 'mints'
        elif event_type == EventType.BURN:
            query = BURNS_QUERY
            query_schema = 'burns'
        else:
            log.error(f'Unexpected event_type: {event_type}. Skipping events query.')
            return address_events

        param_types = {
            '$limit': 'Int!',
            '$offset': 'Int!',
            '$address': 'Bytes!',
            '$start_ts': 'BigInt!',
            '$end_ts': 'BigInt!',
        }
        param_values = {
            'limit': GRAPH_QUERY_LIMIT,
            'offset': 0,
            'address': address.lower(),
            'start_ts': str(start_ts),
            'end_ts': str(end_ts),
        }
        querystr = format_query_indentation(query.format())

        while True:
            try:
                result = self.graph.query(
                    querystr=querystr,
                    param_types=param_types,
                    param_values=param_values,
                )
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e)),
                )
                raise

            result_data = result[query_schema]

            for event in result_data:
                token0_ = event['pair']['token0']
                token1_ = event['pair']['token1']
                token0 = get_ethereum_token(
                    symbol=token0_['symbol'],
                    ethereum_address=to_checksum_address(token0_['id']),
                    name=token0_['name'],
                    decimals=int(token0_['decimals']),
                )
                token1 = get_ethereum_token(
                    symbol=token1_['symbol'],
                    ethereum_address=to_checksum_address(token1_['id']),
                    name=token1_['name'],
                    decimals=int(token1_['decimals']),
                )
                lp_event = LiquidityPoolEvent(
                    tx_hash=event['transaction']['id'],
                    log_index=int(event['logIndex']),
                    address=address,
                    timestamp=Timestamp(int(event['timestamp'])),
                    event_type=event_type,
                    pool_address=to_checksum_address(event['pair']['id']),
                    token0=token0,
                    token1=token1,
                    amount0=AssetAmount(FVal(event['amount0'])),
                    amount1=AssetAmount(FVal(event['amount1'])),
                    usd_price=Price(FVal(event['amountUSD'])),
                    lp_amount=AssetAmount(FVal(event['liquidity'])),
                )
                address_events.append(lp_event)

            # Check whether an extra request is needed
            if len(result_data) < GRAPH_QUERY_LIMIT:
                break

            # Update pagination step
            param_values = {
                **param_values,
                'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
            }

        return address_events

    def _fetch_trades_from_db(
            self,
            addresses: List[ChecksumEthAddress],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> AddressTrades:
        """Fetch all DB Uniswap trades within the time range"""
        db_address_trades: AddressTrades = {}
        for address in addresses:
            db_swaps = self.database.get_amm_swaps(
                from_ts=from_timestamp,
                to_ts=to_timestamp,
                location=Location.UNISWAP,
                address=address,
            )
            db_trades = self.swaps_to_trades(db_swaps)
            if db_trades:
                db_address_trades[address] = db_trades

        return db_address_trades
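    # Editor's sketch: every subgraph loop in this module paginates with the
    # same limit/offset shape; `fetch_page` below is a hypothetical stand-in
    # for self.graph.query plus result unpacking:
    #
    #     offset = 0
    #     while True:
    #         page = fetch_page(limit=GRAPH_QUERY_LIMIT, offset=offset)
    #         consume(page)
    #         if len(page) < GRAPH_QUERY_LIMIT:
    #             break  # a short page means no further results
    #         offset += GRAPH_QUERY_LIMIT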
""" address_amm_trades: AddressTrades = {} new_addresses: List[ChecksumEthAddress] = [] existing_addresses: List[ChecksumEthAddress] = [] min_end_ts: Timestamp = to_timestamp if only_cache: return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp) # Get addresses' last used query range for Uniswap trades for address in addresses: entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}' trades_range = self.database.get_used_query_range(name=entry_name) if not trades_range: new_addresses.append(address) else: existing_addresses.append(address) min_end_ts = min(min_end_ts, trades_range[1]) # Request new addresses' trades if new_addresses: start_ts = Timestamp(0) new_address_trades = self._get_trades_graph( addresses=new_addresses, start_ts=start_ts, end_ts=to_timestamp, ) address_amm_trades.update(new_address_trades) # Insert last used query range for new addresses for address in new_addresses: entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}' self.database.update_used_query_range( name=entry_name, start_ts=start_ts, end_ts=to_timestamp, ) # Request existing DB addresses' trades if existing_addresses and to_timestamp > min_end_ts: address_new_trades = self._get_trades_graph( addresses=existing_addresses, start_ts=min_end_ts, end_ts=to_timestamp, ) address_amm_trades.update(address_new_trades) # Update last used query range for existing addresses for address in existing_addresses: entry_name = f'{UNISWAP_TRADES_PREFIX}_{address}' self.database.update_used_query_range( name=entry_name, start_ts=min_end_ts, end_ts=to_timestamp, ) # Insert all unique swaps to the D all_swaps = set() for address in filter(lambda address: address in address_amm_trades, addresses): for trade in address_amm_trades[address]: for swap in trade.swaps: all_swaps.add(swap) self.database.add_amm_swaps(list(all_swaps)) return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp) @staticmethod def swaps_to_trades(swaps: List[AMMSwap]) -> List[AMMTrade]: trades: List[AMMTrade] = [] if not swaps: return trades # sort by timestamp and then by log index swaps.sort(key=lambda trade: (trade.timestamp, -trade.log_index), reverse=True) last_tx_hash = swaps[0].tx_hash current_swaps: List[AMMSwap] = [] for swap in swaps: if swap.tx_hash != last_tx_hash: trades.extend(Uniswap._tx_swaps_to_trades(current_swaps)) current_swaps = [] current_swaps.append(swap) last_tx_hash = swap.tx_hash if len(current_swaps) != 0: trades.extend(Uniswap._tx_swaps_to_trades(current_swaps)) return trades def _get_trades_graph( self, addresses: List[ChecksumEthAddress], start_ts: Timestamp, end_ts: Timestamp, ) -> AddressTrades: address_trades = {} for address in addresses: trades = self._get_trades_graph_for_address( address, start_ts, end_ts) if len(trades) != 0: address_trades[address] = trades return address_trades def _get_trades_graph_for_address( self, address: ChecksumEthAddress, start_ts: Timestamp, end_ts: Timestamp, ) -> List[AMMTrade]: """Get the address' trades data querying the Uniswap subgraph Each trade (swap) instantiates an <AMMTrade>. The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`. Translated to Uniswap lingo: Trade type BUY: - `asset1In` (QUOTE, reserve1) is gt 0. - `asset0Out` (BASE, reserve0) is gt 0. Trade type SELL: - `asset0In` (BASE, reserve0) is gt 0. - `asset1Out` (QUOTE, reserve1) is gt 0. 
""" trades: List[AMMTrade] = [] param_types = { '$limit': 'Int!', '$offset': 'Int!', '$address': 'Bytes!', '$start_ts': 'BigInt!', '$end_ts': 'BigInt!', } param_values = { 'limit': GRAPH_QUERY_LIMIT, 'offset': 0, 'address': address.lower(), 'start_ts': str(start_ts), 'end_ts': str(end_ts), } querystr = format_query_indentation(SWAPS_QUERY.format()) while True: try: result = self.graph.query( querystr=querystr, param_types=param_types, param_values=param_values, ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e))) raise result_data = result['swaps'] for entry in result_data: swaps = [] for swap in entry['transaction']['swaps']: timestamp = swap['timestamp'] swap_token0 = swap['pair']['token0'] swap_token1 = swap['pair']['token1'] token0 = get_ethereum_token( symbol=swap_token0['symbol'], ethereum_address=to_checksum_address( swap_token0['id']), name=swap_token0['name'], decimals=swap_token0['decimals'], ) token1 = get_ethereum_token( symbol=swap_token1['symbol'], ethereum_address=to_checksum_address( swap_token1['id']), name=swap_token1['name'], decimals=int(swap_token1['decimals']), ) amount0_in = FVal(swap['amount0In']) amount1_in = FVal(swap['amount1In']) amount0_out = FVal(swap['amount0Out']) amount1_out = FVal(swap['amount1Out']) swaps.append( AMMSwap( tx_hash=swap['id'].split('-')[0], log_index=int(swap['logIndex']), address=address, from_address=to_checksum_address(swap['sender']), to_address=to_checksum_address(swap['to']), timestamp=Timestamp(int(timestamp)), location=Location.UNISWAP, token0=token0, token1=token1, amount0_in=AssetAmount(amount0_in), amount1_in=AssetAmount(amount1_in), amount0_out=AssetAmount(amount0_out), amount1_out=AssetAmount(amount1_out), )) # Now that we got all swaps for a transaction, create the trade object trades.extend(self._tx_swaps_to_trades(swaps)) # Check whether an extra request is needed if len(result_data) < GRAPH_QUERY_LIMIT: break # Update pagination step param_values = { **param_values, 'offset': param_values['offset'] + GRAPH_QUERY_LIMIT, # type: ignore } return trades def _get_unknown_asset_price_graph( self, unknown_assets: Set[UnknownEthereumToken], ) -> AssetPrice: """Get today's tokens prices via the Uniswap subgraph Uniswap provides a token price every day at 00:00:00 UTC """ asset_price: AssetPrice = {} unknown_assets_addresses = ([ asset.ethereum_address for asset in unknown_assets ]) unknown_assets_addresses_lower = ([ address.lower() for address in unknown_assets_addresses ]) querystr = format_query_indentation(TOKEN_DAY_DATAS_QUERY.format()) today_epoch = int( datetime.combine(datetime.utcnow().date(), time.min).timestamp(), ) param_types = { '$limit': 'Int!', '$offset': 'Int!', '$token_ids': '[String!]', '$datetime': 'Int!', } param_values = { 'limit': GRAPH_QUERY_LIMIT, 'offset': 0, 'token_ids': unknown_assets_addresses_lower, 'datetime': today_epoch, } while True: try: result = self.graph.query( querystr=querystr, param_types=param_types, param_values=param_values, ) except RemoteError as e: self.msg_aggregator.add_error( SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e))) raise result_data = result['tokenDayDatas'] for tdd in result_data: token_address = to_checksum_address(tdd['token']['id']) asset_price[token_address] = Price(FVal(tdd['priceUSD'])) # Check whether an extra request is needed if len(result_data) < GRAPH_QUERY_LIMIT: break # Update pagination step param_values = { **param_values, 'offset': param_values['offset'] + GRAPH_QUERY_LIMIT, # type: ignore } return 
    @staticmethod
    def _update_assets_prices_in_address_balances(
            address_balances: AddressBalances,
            known_asset_price: AssetPrice,
            unknown_asset_price: AssetPrice,
    ) -> None:
        """Update the pools' underlying assets' prices in USD (prices obtained
        via Inquirer and the Uniswap subgraph).
        """
        for lps in address_balances.values():
            for lp in lps:
                # Try to get the price from either the known or unknown asset
                # prices. Otherwise keep the existing price (zero).
                total_user_balance = FVal(0)
                for asset in lp.assets:
                    asset_ethereum_address = asset.asset.ethereum_address
                    asset_usd_price = known_asset_price.get(
                        asset_ethereum_address,
                        unknown_asset_price.get(asset_ethereum_address, Price(ZERO)),
                    )
                    # Update <LiquidityPoolAsset> if the asset's USD price exists
                    if asset_usd_price != Price(ZERO):
                        asset.usd_price = asset_usd_price
                        asset.user_balance.usd_value = FVal(
                            asset.user_balance.amount * asset_usd_price,
                        )

                    total_user_balance += asset.user_balance.usd_value

                # Update the <LiquidityPool> total balance in USD
                lp.user_balance.usd_value = total_user_balance

    def get_balances(
            self,
            addresses: List[ChecksumEthAddress],
    ) -> AddressBalances:
        """Get the addresses' balances in the Uniswap protocol.

        Premium users can request balances either via the Uniswap subgraph or
        on-chain.
        """
        if self.premium:
            protocol_balance = self._get_balances_graph(addresses=addresses)
        else:
            protocol_balance = self.get_balances_chain(addresses)

        known_assets = protocol_balance.known_assets
        unknown_assets = protocol_balance.unknown_assets

        known_asset_price = self._get_known_asset_price(
            known_assets=known_assets,
            unknown_assets=unknown_assets,
        )

        unknown_asset_price: AssetPrice = {}
        if self.premium:
            unknown_asset_price = self._get_unknown_asset_price_graph(unknown_assets=unknown_assets)  # noqa: E501

        self._update_assets_prices_in_address_balances(
            address_balances=protocol_balance.address_balances,
            known_asset_price=known_asset_price,
            unknown_asset_price=unknown_asset_price,
        )

        return protocol_balance.address_balances

    def get_events_history(
            self,
            addresses: List[ChecksumEthAddress],
            reset_db_data: bool,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> AddressEventsBalances:
        """Get the addresses' events history in the Uniswap protocol."""
        with self.trades_lock:
            if reset_db_data is True:
                self.database.delete_uniswap_events_data()

            address_events_balances = self._get_events_balances(
                addresses=addresses,
                from_timestamp=from_timestamp,
                to_timestamp=to_timestamp,
            )

        return address_events_balances

    def get_trades(
            self,
            addresses: List[ChecksumEthAddress],
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
            only_cache: bool,
    ) -> List[AMMTrade]:
        with self.trades_lock:
            all_trades = []
            trade_mapping = self._get_trades(
                addresses=addresses,
                from_timestamp=from_timestamp,
                to_timestamp=to_timestamp,
                only_cache=only_cache,
            )
            for _, trades in trade_mapping.items():
                all_trades.extend(trades)

            return all_trades

    def get_trades_history(
            self,
            addresses: List[ChecksumEthAddress],
            reset_db_data: bool,
            from_timestamp: Timestamp,
            to_timestamp: Timestamp,
    ) -> AddressTrades:
        """Get the addresses' trades history in the Uniswap protocol."""
        with self.trades_lock:
            if reset_db_data is True:
                self.database.delete_uniswap_trades_data()

            trades = self._get_trades(
                addresses=addresses,
                from_timestamp=from_timestamp,
                to_timestamp=to_timestamp,
                only_cache=False,
            )

        return trades

    # -- Methods following the EthereumModule interface -- #
    def on_startup(self) -> None:
        pass

    def on_account_addition(self, address: ChecksumEthAddress) -> None:
        pass
    def on_account_removal(self, address: ChecksumEthAddress) -> None:
        pass

    def deactivate(self) -> None:
        self.database.delete_uniswap_trades_data()
        self.database.delete_uniswap_events_data()
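# Editor's sketch of driving this module's public surface (`uniswap` is a
# hypothetical, fully constructed instance; ts_now() is assumed to be the
# usual rotki helper returning the current Timestamp):
#
#     balances = uniswap.get_balances(addresses)
#     trades = uniswap.get_trades(
#         addresses=addresses,
#         from_timestamp=Timestamp(0),
#         to_timestamp=ts_now(),
#         only_cache=True,  # read from the DB without hitting the subgraph
#     )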