def test_exception_retries():
    """Test an exception raised by Client.execute() triggers the retry logic.

    The backoff factor is patched to 0 so the retries run without sleeping,
    and the mocked client always raises, exhausting every retry.
    """
    graph = Graph(TEST_URL_1)
    types_ = {'$limit': 'Int!'}
    values = {'limit': 1}
    query = format_query_indentation(TEST_QUERY_1.format())
    mocked_client = MagicMock()
    mocked_client.execute.side_effect = Exception("any message")
    no_backoff = patch(
        'rotkehlchen.chain.ethereum.graph.RETRY_BACKOFF_FACTOR',
        new=0,
    )
    with no_backoff, patch.object(graph, 'client', new=mocked_client):
        with pytest.raises(RemoteError) as e:
            graph.query(
                querystr=query,
                param_types=types_,
                param_values=values,
            )
    # Every retry attempt must have hit the client before giving up
    assert mocked_client.execute.call_count == QUERY_RETRY_TIMES
    assert 'No retries left' in str(e.value)
def test_success_result():
    """Test a successful response returns result as expected and does not
    triggers the retry logic.
    """
    expected_result = {"schema": [{"data1"}, {"data2"}]}
    graph = Graph(TEST_URL_1)
    param_types = {'$limit': 'Int!'}
    param_values = {'limit': 1}
    querystr = format_query_indentation(TEST_QUERY_1.format())
    client = MagicMock()
    client.execute.return_value = expected_result
    # Bug fix: the original passed `return_value=0`, which replaces the patched
    # constant with a MagicMock (whose *call* returns 0) rather than with the
    # integer 0. `new=0` actually substitutes the constant, matching the
    # sibling test_exception_retries.
    backoff_factor_patch = patch(
        'rotkehlchen.chain.ethereum.graph.RETRY_BACKOFF_FACTOR',
        new=0,
    )
    client_patch = patch.object(graph, 'client', new=client)
    with ExitStack() as stack:
        stack.enter_context(backoff_factor_patch)
        stack.enter_context(client_patch)
        result = graph.query(
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
    # A successful first call must not trigger any retry
    assert client.execute.call_count == 1
    assert result == expected_result
def _get_unknown_asset_price_graph(
        self,
        unknown_assets: Set[UnknownEthereumToken],
) -> AssetPrice:
    """Get today's tokens prices via the Uniswap subgraph

    Uniswap provides a token price every day at 00:00:00 UTC, so the query
    filters token day data by today's midnight (UTC) timestamp.

    Returns a mapping of checksummed token address to its USD Price.
    """
    asset_price: AssetPrice = {}
    unknown_assets_addresses = (
        [asset.ethereum_address for asset in unknown_assets]
    )
    # The subgraph stores addresses lowercased
    unknown_assets_addresses_lower = (
        [address.lower() for address in unknown_assets_addresses]
    )
    querystr = format_query_indentation(TOKEN_DAY_DATAS_QUERY.format())
    # Timestamp of today at 00:00:00 UTC — the daily price snapshot time
    today_epoch = int(
        datetime.combine(datetime.utcnow().date(), time.min).timestamp(),
    )
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$token_ids': '[String!]',
        '$datetime': 'Int!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'token_ids': unknown_assets_addresses_lower,
        'datetime': today_epoch,
    }
    # Paginate until a page comes back smaller than the query limit
    while True:
        result = self.graph.query(  # type: ignore # caller already checks
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result['tokenDayDatas']
        for tdd in result_data:
            token_address = to_checksum_address(tdd['token']['id'])
            asset_price[token_address] = Price(FVal(tdd['priceUSD']))
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    return asset_price
def _get_raw_history(
        self,
        addresses: List[ChecksumEthAddress],
        query_for: Literal['stake', 'trove'],
) -> Dict[str, Any]:
    """Query the subgraph for raw trove or stake data of the given addresses.

    `query_for` selects which of the two prepared queries is sent; addresses
    are lowercased because the subgraph stores them that way.
    """
    raw_query = QUERY_TROVE if query_for == 'trove' else QUERY_STAKE
    querystr = format_query_indentation(raw_query)
    return self.graph.query(
        querystr=querystr,
        param_types={'$addresses': '[Bytes!]'},
        param_values={'addresses': [addr.lower() for addr in addresses]},
    )
def get_all_events(
        self,
        addresses: List[EthAddress],
        from_block: int,
        to_block: int,
) -> Dict[ChecksumEthAddress, Dict[str, List[YearnVaultEvent]]]:
    """Query the subgraph for all yearn vault events of the given addresses
    in the given block range.

    Returns a mapping of checksummed account address to a dict with two keys:
    'deposits' and 'withdrawals', each a list of YearnVaultEvent.
    """
    param_types = {
        '$from_block': 'BigInt!',
        '$to_block': 'BigInt!',
        '$addresses': '[Bytes!]',
    }
    param_values = {
        'from_block': from_block,
        'to_block': to_block,
        'addresses': addresses,
    }
    querystr = format_query_indentation(QUERY_USER_EVENTS.format())
    query = self.graph.query(
        querystr=querystr,
        param_types=param_types,
        param_values=param_values,
    )
    result: Dict[ChecksumEthAddress, Dict[str, List[YearnVaultEvent]]] = {}
    for account in query['accounts']:
        # Subgraph returns lowercased ids; normalize to checksum form
        account_id = to_checksum_address(account['id'])
        result[account_id] = {}
        result[account_id]['deposits'] = self._process_event(
            account['deposits'], 'deposit')
        result[account_id]['withdrawals'] = self._process_event(
            account['withdrawals'], 'withdraw',
        )
    return result
def _get_staking_events_graph(
        self,
        addresses: List[ChecksumEthAddress],
        event_type: AdexEventType,
        from_timestamp: Optional[Timestamp] = None,
        to_timestamp: Optional[Timestamp] = None,
) -> Union[List[Bond], List[Unbond], List[UnbondRequest], List[ChannelWithdraw]]:
    """Get the addresses' events data querying the AdEx subgraph.

    The event type selects which query, response schema and deserializer
    are used. Bond/unbond style events are queried per AdEx identity
    address; channel withdrawals are queried per user address.
    Results are paginated via limit/offset until a short page is returned.
    """
    identity_address_map = self._get_identity_address_map(addresses)
    user_identities = [
        str(identity).lower() for identity in identity_address_map.keys()
    ]
    deserialization_method: DeserializationMethod
    querystr: str
    schema: Literal['bonds', 'unbonds', 'unbondRequests', 'channelWithdraws']
    if event_type == AdexEventType.BOND:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_bond
        querystr = format_query_indentation(BONDS_QUERY.format())
        schema = 'bonds'
        event_type_pretty = 'bond'
    elif event_type == AdexEventType.UNBOND:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_unbond
        querystr = format_query_indentation(UNBONDS_QUERY.format())
        schema = 'unbonds'
        event_type_pretty = 'unbond'
    elif event_type == AdexEventType.UNBOND_REQUEST:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_unbond_request
        querystr = format_query_indentation(UNBOND_REQUESTS_QUERY.format())
        schema = 'unbondRequests'
        event_type_pretty = 'unbond request'
    elif event_type == AdexEventType.CHANNEL_WITHDRAW:
        queried_addresses = [address.lower() for address in addresses]
        deserialization_method = self._deserialize_channel_withdraw
        querystr = format_query_indentation(
            CHANNEL_WITHDRAWS_QUERY.format())
        schema = 'channelWithdraws'
        event_type_pretty = 'channel withdraws'
    else:
        raise AssertionError(f'Unexpected AdEx event type: {event_type}.')
    # Default to the full history when no time range is given
    start_ts = from_timestamp or 0
    end_ts = to_timestamp or int(datetime.utcnow().timestamp())
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$queried_addresses': '[Bytes!]',
        '$start_ts': 'Int!',
        '$end_ts': 'Int!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'queried_addresses': queried_addresses,
        'start_ts': start_ts,
        'end_ts': end_ts,
    }
    events = []
    while True:
        try:
            result = self.graph.query(  # type: ignore # caller already checks
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            # Surface the subgraph failure to the user, then re-raise
            msg = str(e)
            self.msg_aggregator.add_error(
                f'{msg}. All AdEx balances queries are not functioning until this is fixed.',
            )
            raise
        result_data = result[schema]
        for raw_event in result_data:
            try:
                event = deserialization_method(
                    raw_event=raw_event,
                    identity_address_map=identity_address_map,
                )
            except KeyError as e:
                # A malformed event is logged and skipped, not fatal
                msg = str(e)
                log.error(
                    f'Error processing an AdEx {event_type_pretty}.',
                    raw_event=raw_event,
                    error=msg,
                )
                self.msg_aggregator.add_error(
                    f'Failed to deserialize an AdEx {event_type_pretty}. '
                    f'Check logs for details. Ignoring it.',
                )
                continue
            events.append(event)
        # A short page means there is nothing more to fetch
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        offset = cast(int, param_values['offset'])
        param_values = {
            **param_values,
            'offset': offset + GRAPH_QUERY_LIMIT,
        }
    return events  # type: ignore # the suggested type is not the right one
def _get_trades_graph_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[AMMTrade]:
    """Get the address' trades data querying the Uniswap subgraph

    Each trade (swap) instantiates an <AMMTrade>.

    The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
    Translated to Uniswap lingo:

    Trade type BUY:
    - `asset1In` (QUOTE, reserve1) is gt 0.
    - `asset0Out` (BASE, reserve0) is gt 0.

    Trade type SELL:
    - `asset0In` (BASE, reserve0) is gt 0.
    - `asset1Out` (QUOTE, reserve1) is gt 0.
    """
    trades: List[AMMTrade] = []
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$address': 'Bytes!',
        '$start_ts': 'BigInt!',
        '$end_ts': 'BigInt!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'address': address.lower(),
        'start_ts': str(start_ts),
        'end_ts': str(end_ts),
    }
    querystr = format_query_indentation(SWAPS_QUERY.format())
    while True:
        result = self.graph.query(  # type: ignore # caller already checks
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result['swaps']
        for entry in result_data:
            swaps = []
            for swap in entry['transaction']['swaps']:
                timestamp = swap['timestamp']
                swap_token0 = swap['pair']['token0']
                swap_token1 = swap['pair']['token1']
                token0 = get_ethereum_token(
                    symbol=swap_token0['symbol'],
                    ethereum_address=to_checksum_address(swap_token0['id']),
                    name=swap_token0['name'],
                    # Bug fix: cast to int — token1 below already does, and
                    # decimals must be an int; the raw graph value may not be.
                    decimals=int(swap_token0['decimals']),
                )
                token1 = get_ethereum_token(
                    symbol=swap_token1['symbol'],
                    ethereum_address=to_checksum_address(swap_token1['id']),
                    name=swap_token1['name'],
                    decimals=int(swap_token1['decimals']),
                )
                amount0_in = FVal(swap['amount0In'])
                amount1_in = FVal(swap['amount1In'])
                amount0_out = FVal(swap['amount0Out'])
                amount1_out = FVal(swap['amount1Out'])
                swaps.append(AMMSwap(
                    # Swap ids are '<tx hash>-<log index>'
                    tx_hash=swap['id'].split('-')[0],
                    log_index=int(swap['logIndex']),
                    address=address,
                    from_address=to_checksum_address(swap['sender']),
                    to_address=to_checksum_address(swap['to']),
                    timestamp=Timestamp(int(timestamp)),
                    location=Location.UNISWAP,
                    token0=token0,
                    token1=token1,
                    amount0_in=AssetAmount(amount0_in),
                    amount1_in=AssetAmount(amount1_in),
                    amount0_out=AssetAmount(amount0_out),
                    amount1_out=AssetAmount(amount1_out),
                ))
            # Now that we got all swaps for a transaction, create the trade object
            trades.extend(self._tx_swaps_to_trades(swaps))
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    return trades
def _get_events_graph(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        event_type: EventType,
) -> List[LiquidityPoolEvent]:
    """Get the address' events (mints & burns) querying the Uniswap subgraph

    Each event data is stored in a <LiquidityPoolEvent>.
    Results are paginated with limit/offset until a short page is returned.
    """
    address_events: List[LiquidityPoolEvent] = []
    if event_type == EventType.MINT:
        query = MINTS_QUERY
        query_schema = 'mints'
    elif event_type == EventType.BURN:
        query = BURNS_QUERY
        query_schema = 'burns'
    else:
        log.error(f'Unexpected event_type: {event_type}. Skipping events query.')
        return address_events
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$address': 'Bytes!',
        '$start_ts': 'BigInt!',
        '$end_ts': 'BigInt!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'address': address.lower(),
        'start_ts': str(start_ts),
        'end_ts': str(end_ts),
    }
    querystr = format_query_indentation(query.format())
    while True:
        result = self.graph.query(  # type: ignore # caller already checks
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result[query_schema]
        for event in result_data:
            token0_ = event['pair']['token0']
            token1_ = event['pair']['token1']
            token0 = get_ethereum_token(
                symbol=token0_['symbol'],
                ethereum_address=to_checksum_address(token0_['id']),
                name=token0_['name'],
                # Bug fix: cast to int — token1 below already does, and
                # decimals must be an int; the raw graph value may not be.
                decimals=int(token0_['decimals']),
            )
            token1 = get_ethereum_token(
                symbol=token1_['symbol'],
                ethereum_address=to_checksum_address(token1_['id']),
                name=token1_['name'],
                decimals=int(token1_['decimals']),
            )
            lp_event = LiquidityPoolEvent(
                tx_hash=event['transaction']['id'],
                log_index=int(event['logIndex']),
                address=address,
                timestamp=Timestamp(int(event['timestamp'])),
                event_type=event_type,
                pool_address=to_checksum_address(event['pair']['id']),
                token0=token0,
                token1=token1,
                amount0=AssetAmount(FVal(event['amount0'])),
                amount1=AssetAmount(FVal(event['amount1'])),
                usd_price=Price(FVal(event['amountUSD'])),
                lp_amount=AssetAmount(FVal(event['liquidity'])),
            )
            address_events.append(lp_event)
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    return address_events
def _get_balances_graph(
        self,
        addresses: List[ChecksumEthAddress],
) -> ProtocolBalance:
    """Get the addresses' pools data querying the Uniswap subgraph

    Each liquidity position is converted into a <LiquidityPool>.
    Tokens are classified as known (<EthereumToken>) or unknown
    (<UnknownEthereumToken>) and collected in the returned ProtocolBalance.
    """
    address_balances: DDAddressBalances = defaultdict(list)
    known_assets: Set[EthereumToken] = set()
    unknown_assets: Set[UnknownEthereumToken] = set()
    # The subgraph stores addresses lowercased
    addresses_lower = [address.lower() for address in addresses]
    querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$addresses': '[String!]',
        '$balance': 'BigDecimal!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'addresses': addresses_lower,
        'balance': '0',
    }
    # Paginate until a page comes back smaller than the query limit
    while True:
        result = self.graph.query(  # type: ignore # caller already checks
            querystr=querystr,
            param_types=param_types,
            param_values=param_values,
        )
        result_data = result['liquidityPositions']
        for lp in result_data:
            user_address = to_checksum_address(lp['user']['id'])
            user_lp_balance = FVal(lp['liquidityTokenBalance'])
            lp_pair = lp['pair']
            lp_address = to_checksum_address(lp_pair['id'])
            lp_total_supply = FVal(lp_pair['totalSupply'])
            # Insert LP tokens reserves within tokens dicts
            token0 = lp_pair['token0']
            token0['total_amount'] = lp_pair['reserve0']
            token1 = lp_pair['token1']
            token1['total_amount'] = lp_pair['reserve1']
            liquidity_pool_assets = []
            for token in token0, token1:
                # Get the token <EthereumToken> or <UnknownEthereumToken>
                asset = get_ethereum_token(
                    symbol=token['symbol'],
                    ethereum_address=to_checksum_address(token['id']),
                    name=token['name'],
                    decimals=int(token['decimals']),
                )
                # Classify the asset either as known or unknown
                if isinstance(asset, EthereumToken):
                    known_assets.add(asset)
                elif isinstance(asset, UnknownEthereumToken):
                    unknown_assets.add(asset)
                # Estimate the underlying asset total_amount
                asset_total_amount = FVal(token['total_amount'])
                # User's share of the pool, scaled by the token reserve
                user_asset_balance = (
                    user_lp_balance / lp_total_supply * asset_total_amount
                )
                liquidity_pool_asset = LiquidityPoolAsset(
                    asset=asset,
                    total_amount=asset_total_amount,
                    user_balance=Balance(amount=user_asset_balance),
                )
                liquidity_pool_assets.append(liquidity_pool_asset)
            liquidity_pool = LiquidityPool(
                address=lp_address,
                assets=liquidity_pool_assets,
                total_supply=lp_total_supply,
                user_balance=Balance(amount=user_lp_balance),
            )
            address_balances[user_address].append(liquidity_pool)
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    protocol_balance = ProtocolBalance(
        address_balances=dict(address_balances),
        known_assets=known_assets,
        unknown_assets=unknown_assets,
    )
    return protocol_balance
def _get_staking_events_graph(
        self,
        addresses: List[ChecksumEthAddress],
        identity_address_map: Dict[ChecksumAddress, ChecksumAddress],
        event_type: AdexEventType,
        from_timestamp: Optional[Timestamp] = None,
        to_timestamp: Optional[Timestamp] = None,
) -> Union[List[Bond], List[Unbond], List[UnbondRequest], List[ChannelWithdraw]]:
    """Get the addresses' events data querying the AdEx subgraph

    The event type selects which query, response schema and deserializer are
    used. Bond/unbond style events are queried per AdEx identity address;
    channel withdrawals are queried per user address. Results are paginated
    via limit/offset until a short page is returned.

    May raise:
    - DeserializationError: when there is a problem deserializing the
    events on the subgraph response.
    - RemoteError: when there is a problem querying the subgraph.
    """
    user_identities = [
        str(identity).lower() for identity in identity_address_map.keys()
    ]
    deserialization_method: DeserializationMethod
    querystr: str
    schema: Literal['bonds', 'unbonds', 'unbondRequests', 'channelWithdraws']
    if event_type == AdexEventType.BOND:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_bond
        querystr = format_query_indentation(BONDS_QUERY.format())
        schema = 'bonds'
    elif event_type == AdexEventType.UNBOND:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_unbond
        querystr = format_query_indentation(UNBONDS_QUERY.format())
        schema = 'unbonds'
    elif event_type == AdexEventType.UNBOND_REQUEST:
        queried_addresses = user_identities
        deserialization_method = self._deserialize_unbond_request
        querystr = format_query_indentation(UNBOND_REQUESTS_QUERY.format())
        schema = 'unbondRequests'
    elif event_type == AdexEventType.CHANNEL_WITHDRAW:
        queried_addresses = [address.lower() for address in addresses]
        deserialization_method = self._deserialize_channel_withdraw
        querystr = format_query_indentation(
            CHANNEL_WITHDRAWS_QUERY.format())
        schema = 'channelWithdraws'
    else:
        raise AssertionError(f'Unexpected AdEx event type: {event_type}.')
    # Default to the full history when no time range is given
    start_ts = from_timestamp or 0
    end_ts = to_timestamp or ts_now()
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$queried_addresses': '[Bytes!]',
        '$start_ts': 'Int!',
        '$end_ts': 'Int!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'queried_addresses': queried_addresses,
        'start_ts': start_ts,
        'end_ts': end_ts,
    }
    events = []
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            # Surface the subgraph failure to the user, then re-raise
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(protocol="AdEx", error_msg=str(e)),
            )
            raise
        result_data = result[schema]
        for raw_event in result_data:
            event = deserialization_method(
                raw_event=raw_event,
                identity_address_map=identity_address_map,
            )
            events.append(event)
        # A short page means there is nothing more to fetch
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        offset = cast(int, param_values['offset'])
        param_values = {
            **param_values,
            'offset': offset + GRAPH_QUERY_LIMIT,
        }
    return events  # type: ignore # the suggested type is not the right one
def _get_balances_graph(
        self,
        addresses: List[ChecksumEthAddress],
) -> ProtocolBalance:
    """Get the addresses' pools data querying this AMM's subgraph

    Each liquidity position is converted into a <LiquidityPool>.
    Pagination combines limit/offset with an `id` cursor: once the offset
    reaches GRAPH_QUERY_SKIP_LIMIT it is reset and the last seen position id
    is used as the cursor instead, so more results than the skip limit can
    be fetched.
    """
    address_balances: DDAddressToLPBalances = defaultdict(list)
    known_assets: Set[EthereumToken] = set()
    unknown_assets: Set[EthereumToken] = set()
    # The subgraph stores addresses lowercased
    addresses_lower = [address.lower() for address in addresses]
    querystr = format_query_indentation(LIQUIDITY_POSITIONS_QUERY.format())
    query_id = '0'
    query_offset = 0
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$addresses': '[String!]',
        '$balance': 'BigDecimal!',
        '$id': 'ID!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'addresses': addresses_lower,
        'balance': '0',
        'id': query_id,
    }
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            # Surface the subgraph failure to the user, then re-raise
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(
                    error_msg=str(e),
                    location=self.location,
                ),
            )
            raise
        result_data = result['liquidityPositions']
        for lp in result_data:
            lp_pair = lp['pair']
            lp_total_supply = FVal(lp_pair['totalSupply'])
            user_lp_balance = FVal(lp['liquidityTokenBalance'])
            try:
                user_address = deserialize_ethereum_address(
                    lp['user']['id'])
                lp_address = deserialize_ethereum_address(lp_pair['id'])
            except DeserializationError as e:
                msg = (
                    f'Failed to Deserialize address. Skipping {self.location} '
                    f'pool {lp_pair} with user address {lp["user"]["id"]}'
                )
                log.error(msg)
                raise RemoteError(msg) from e
            # Insert LP tokens reserves within tokens dicts
            token0 = lp_pair['token0']
            token0['total_amount'] = lp_pair['reserve0']
            token1 = lp_pair['token1']
            token1['total_amount'] = lp_pair['reserve1']
            liquidity_pool_assets = []
            for token in token0, token1:
                try:
                    deserialized_eth_address = deserialize_ethereum_address(
                        token['id'])
                except DeserializationError as e:
                    msg = (
                        f'Failed to deserialize token address {token["id"]} '
                        f'Bad token address in {self.location} lp pair came from the graph.'
                    )
                    log.error(msg)
                    raise RemoteError(msg) from e
                asset = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=token['symbol'],
                    ethereum_address=deserialized_eth_address,
                    name=token['name'],
                    decimals=int(token['decimals']),
                )
                # Classify by whether a price oracle knows this asset
                if asset.has_oracle():
                    known_assets.add(asset)
                else:
                    unknown_assets.add(asset)
                # Estimate the underlying asset total_amount
                asset_total_amount = FVal(token['total_amount'])
                # User's share of the pool, scaled by the token reserve
                user_asset_balance = (user_lp_balance / lp_total_supply * asset_total_amount)
                liquidity_pool_asset = LiquidityPoolAsset(
                    asset=asset,
                    total_amount=asset_total_amount,
                    user_balance=Balance(amount=user_asset_balance),
                )
                liquidity_pool_assets.append(liquidity_pool_asset)
            liquidity_pool = LiquidityPool(
                address=lp_address,
                assets=liquidity_pool_assets,
                total_supply=lp_total_supply,
                user_balance=Balance(amount=user_lp_balance),
            )
            address_balances[user_address].append(liquidity_pool)
            # Remember last seen position id as pagination cursor
            query_id = lp['id']
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        if query_offset == GRAPH_QUERY_SKIP_LIMIT:
            query_offset = 0
            new_query_id = query_id
        else:
            query_offset += GRAPH_QUERY_LIMIT
            new_query_id = '0'
        param_values = {
            **param_values,
            'id': new_query_id,
            'offset': query_offset,
        }
    protocol_balance = ProtocolBalance(
        address_balances=dict(address_balances),
        known_assets=known_assets,
        unknown_assets=unknown_assets,
    )
    return protocol_balance
def _get_unknown_asset_price_graph(
        self,
        unknown_assets: Set[EthereumToken],
) -> AssetToPrice:
    """Get today's tokens prices via the AMM subgraph

    AMM provides a token price every day at 00:00:00 UTC, so the query
    filters token day data by today's midnight (UTC) timestamp.

    This function can raise RemoteError
    """
    asset_price: AssetToPrice = {}
    # The subgraph stores addresses lowercased
    unknown_assets_addresses = ([
        asset.ethereum_address.lower()
        for asset in unknown_assets
    ])
    querystr = format_query_indentation(TOKEN_DAY_DATAS_QUERY.format())
    # Timestamp of today at 00:00:00 UTC — the daily price snapshot time
    today_epoch = int(
        datetime.combine(datetime.utcnow().date(), time.min).timestamp(),
    )
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$token_ids': '[String!]',
        '$datetime': 'Int!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'token_ids': unknown_assets_addresses,
        'datetime': today_epoch,
    }
    # Paginate until a page comes back smaller than the query limit
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            # Surface the subgraph failure to the user, then re-raise
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e), location=self.location),
            )
            raise
        result_data = result['tokenDayDatas']
        for tdd in result_data:
            try:
                token_address = deserialize_ethereum_address(
                    tdd['token']['id'])
            except DeserializationError as e:
                msg = (f'Error deserializing address {tdd["token"]["id"]} '
                       f'during {self.location} prices query from graph.')
                log.error(msg)
                raise RemoteError(msg) from e
            asset_price[token_address] = Price(FVal(tdd['priceUSD']))
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    return asset_price
def _read_subgraph_trades(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[AMMTrade]:
    """Get the address' trades data querying the AMM subgraph

    Each trade (swap) instantiates an <AMMTrade>.

    The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
    Translated to AMM lingo:

    Trade type BUY:
    - `asset1In` (QUOTE, reserve1) is gt 0.
    - `asset0Out` (BASE, reserve0) is gt 0.

    Trade type SELL:
    - `asset0In` (BASE, reserve0) is gt 0.
    - `asset1Out` (QUOTE, reserve1) is gt 0.

    Pagination combines limit/offset with an `id` cursor so the
    GRAPH_QUERY_SKIP_LIMIT can be exceeded.

    May raise
    - RemoteError
    """
    trades: List[AMMTrade] = []
    query_id = '0'
    query_offset = 0
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$address': 'Bytes!',
        '$start_ts': 'BigInt!',
        '$end_ts': 'BigInt!',
        '$id': 'ID!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'address': address.lower(),
        'start_ts': str(start_ts),
        'end_ts': str(end_ts),
        'id': query_id,
    }
    querystr = format_query_indentation(self.swaps_query.format())
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e), location=self.location),
            )
            raise
        for entry in result['swaps']:
            swaps = []
            try:
                for swap in entry['transaction']['swaps']:
                    timestamp = swap['timestamp']
                    swap_token0 = swap['pair']['token0']
                    swap_token1 = swap['pair']['token1']
                    try:
                        token0_deserialized = deserialize_ethereum_address(
                            swap_token0['id'])
                        token1_deserialized = deserialize_ethereum_address(
                            swap_token1['id'])
                        from_address_deserialized = deserialize_ethereum_address(
                            swap['sender'])  # noqa
                        to_address_deserialized = deserialize_ethereum_address(
                            swap['to'])
                    except DeserializationError:
                        msg = (
                            f'Failed to deserialize addresses in trade from {self.location} graph'  # noqa
                            f' with token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '  # noqa
                            f'swap sender: {swap["sender"]}, swap receiver {swap["to"]}'
                        )
                        log.error(msg)
                        continue
                    token0 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token0['symbol'],
                        ethereum_address=token0_deserialized,
                        name=swap_token0['name'],
                        # Bug fix: cast to int — token1 below already does,
                        # and decimals must be an int; the raw graph value
                        # may not be.
                        decimals=int(swap_token0['decimals']),
                    )
                    token1 = get_or_create_ethereum_token(
                        userdb=self.database,
                        symbol=swap_token1['symbol'],
                        ethereum_address=token1_deserialized,
                        name=swap_token1['name'],
                        decimals=int(swap_token1['decimals']),
                    )
                    try:
                        amount0_in = FVal(swap['amount0In'])
                        amount1_in = FVal(swap['amount1In'])
                        amount0_out = FVal(swap['amount0Out'])
                        amount1_out = FVal(swap['amount1Out'])
                    except ValueError as e:
                        log.error(
                            f'Failed to read amounts in {self.location} swap {str(swap)}. '
                            f'{str(e)}.',
                        )
                        continue
                    swaps.append(AMMSwap(
                        # Swap ids are '<tx hash>-<log index>'
                        tx_hash=swap['id'].split('-')[0],
                        log_index=int(swap['logIndex']),
                        address=address,
                        from_address=from_address_deserialized,
                        to_address=to_address_deserialized,
                        timestamp=Timestamp(int(timestamp)),
                        location=self.location,
                        token0=token0,
                        token1=token1,
                        amount0_in=AssetAmount(amount0_in),
                        amount1_in=AssetAmount(amount1_in),
                        amount0_out=AssetAmount(amount0_out),
                        amount1_out=AssetAmount(amount1_out),
                    ))
                # Remember last seen swap entry id as pagination cursor
                query_id = entry['id']
            except KeyError as e:
                log.error(
                    f'Failed to read trade in {self.location} swap {str(entry)}. '
                    f'{str(e)}.',
                )
                continue
            # with the new logic the list of swaps can be empty, in that case don't try
            # to make trades from the swaps
            if len(swaps) == 0:
                continue
            # Now that we got all swaps for a transaction, create the trade object
            trades.extend(self._tx_swaps_to_trades(swaps))
        # Check whether an extra request is needed
        if len(result['swaps']) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step: fall back to the id cursor once the
        # subgraph's skip limit has been reached
        if query_offset == GRAPH_QUERY_SKIP_LIMIT:
            query_offset = 0
            new_query_id = query_id
        else:
            query_offset += GRAPH_QUERY_LIMIT
            new_query_id = '0'
        param_values = {
            **param_values,
            'id': new_query_id,
            'offset': query_offset,
        }
    return trades
def _get_events_graph(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        event_type: EventType,
) -> List[LiquidityPoolEvent]:
    """Get the address' events (mints & burns) querying the AMM's subgraph

    Each event data is stored in a <LiquidityPoolEvent>.
    Pagination combines limit/offset with an `id` cursor so the
    GRAPH_QUERY_SKIP_LIMIT can be exceeded.
    """
    address_events: List[LiquidityPoolEvent] = []
    if event_type == self.mint_event:
        query = MINTS_QUERY
        query_schema = 'mints'
    elif event_type == self.burn_event:
        query = BURNS_QUERY
        query_schema = 'burns'
    else:
        log.error(
            f'Unexpected {self.location} event_type: {event_type}. Skipping events query.',
        )
        return address_events
    query_id = '0'
    query_offset = 0
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$address': 'Bytes!',
        '$start_ts': 'BigInt!',
        '$end_ts': 'BigInt!',
        '$id': 'ID!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': query_offset,
        'address': address.lower(),
        'start_ts': str(start_ts),
        'end_ts': str(end_ts),
        'id': query_id,
    }
    querystr = format_query_indentation(query.format())
    while True:
        try:
            result = self.graph.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e), location=self.location),
            )
            raise
        except AttributeError as e:
            raise ModuleInitializationFailure(
                f'{self.location} subgraph remote error') from e
        result_data = result[query_schema]
        for event in result_data:
            token0_ = event['pair']['token0']
            token1_ = event['pair']['token1']
            try:
                token0_deserialized = deserialize_ethereum_address(
                    token0_['id'])
                token1_deserialized = deserialize_ethereum_address(
                    token1_['id'])
                pool_deserialized = deserialize_ethereum_address(
                    event['pair']['id'])
            except DeserializationError as e:
                # Bug fix: the message printed token0_["id"] for both
                # tokens; "token 1" now reports token1_["id"].
                msg = (
                    f'Failed to deserialize address involved in liquidity pool event for'
                    f' {self.location}. Token 0: {token0_["id"]}, token 1: {token1_["id"]},'
                    f' pair: {event["pair"]["id"]}.'
                )
                log.error(msg)
                raise RemoteError(msg) from e
            token0 = get_or_create_ethereum_token(
                userdb=self.database,
                symbol=token0_['symbol'],
                ethereum_address=token0_deserialized,
                name=token0_['name'],
                # Bug fix: cast to int — token1 below already does, and
                # decimals must be an int; the raw graph value may not be.
                decimals=int(token0_['decimals']),
            )
            token1 = get_or_create_ethereum_token(
                userdb=self.database,
                symbol=token1_['symbol'],
                ethereum_address=token1_deserialized,
                name=token1_['name'],
                decimals=int(token1_['decimals']),
            )
            lp_event = LiquidityPoolEvent(
                tx_hash=event['transaction']['id'],
                log_index=int(event['logIndex']),
                address=address,
                timestamp=Timestamp(int(event['timestamp'])),
                event_type=event_type,
                pool_address=pool_deserialized,
                token0=token0,
                token1=token1,
                amount0=AssetAmount(FVal(event['amount0'])),
                amount1=AssetAmount(FVal(event['amount1'])),
                usd_price=Price(FVal(event['amountUSD'])),
                lp_amount=AssetAmount(FVal(event['liquidity'])),
            )
            address_events.append(lp_event)
            # Remember last seen event id as pagination cursor
            query_id = event['id']
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step: fall back to the id cursor once the
        # subgraph's skip limit has been reached
        if query_offset == GRAPH_QUERY_SKIP_LIMIT:
            query_offset = 0
            new_query_id = query_id
        else:
            query_offset += GRAPH_QUERY_LIMIT
            new_query_id = '0'
        param_values = {
            **param_values,
            'id': new_query_id,
            'offset': query_offset,
        }
    return address_events
def _get_trades_graph_v3_for_address(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[AMMTrade]:
    """Get the address' trades data querying the Uniswap subgraph

    Each trade (swap) instantiates an <AMMTrade>.

    The trade pair (i.e. BASE_QUOTE) is determined by `reserve0_reserve1`.
    Translated to Uniswap lingo:

    Trade type BUY:
    - `amount1` (QUOTE, reserve1) is gt 0.
    - `amount0` (BASE, reserve0) is lt 0.

    Trade type SELL:
    - `amount0` (BASE, reserve0) is gt 0.
    - `amount1` (QUOTE, reserve1) is lt 0.

    May raise:
    - RemoteError
    """
    trades: List[AMMTrade] = []
    param_types = {
        '$limit': 'Int!',
        '$offset': 'Int!',
        '$address': 'Bytes!',
        '$start_ts': 'BigInt!',
        '$end_ts': 'BigInt!',
    }
    param_values = {
        'limit': GRAPH_QUERY_LIMIT,
        'offset': 0,
        'address': address.lower(),
        'start_ts': str(start_ts),
        'end_ts': str(end_ts),
    }
    querystr = format_query_indentation(V3_SWAPS_QUERY.format())
    while True:
        try:
            result = self.graph_v3.query(
                querystr=querystr,
                param_types=param_types,
                param_values=param_values,
            )
        except RemoteError as e:
            self.msg_aggregator.add_error(SUBGRAPH_REMOTE_ERROR_MSG.format(error_msg=str(e)))
            raise
        result_data = result['swaps']
        for entry in result_data:
            swaps = []
            for swap in entry['transaction']['swaps']:
                timestamp = swap['timestamp']
                swap_token0 = swap['token0']
                swap_token1 = swap['token1']
                try:
                    token0_deserialized = deserialize_ethereum_address(swap_token0['id'])
                    token1_deserialized = deserialize_ethereum_address(swap_token1['id'])
                    from_address_deserialized = deserialize_ethereum_address(swap['sender'])
                    to_address_deserialized = deserialize_ethereum_address(swap['recipient'])
                except DeserializationError:
                    # Bug fix: the message referenced swap["to"], but V3
                    # swaps carry the receiver under 'recipient' (as
                    # deserialized above); formatting the old message
                    # would itself raise KeyError.
                    msg = (
                        f'Failed to deserialize addresses in trade from uniswap graph with '
                        f'token 0: {swap_token0["id"]}, token 1: {swap_token1["id"]}, '
                        f'swap sender: {swap["sender"]}, swap receiver {swap["recipient"]}'
                    )
                    log.error(msg)
                    continue
                token0 = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=swap_token0['symbol'],
                    ethereum_address=token0_deserialized,
                    name=swap_token0['name'],
                    # Bug fix: cast to int — token1 below already does, and
                    # decimals must be an int; the raw graph value may not be.
                    decimals=int(swap_token0['decimals']),
                )
                token1 = get_or_create_ethereum_token(
                    userdb=self.database,
                    symbol=swap_token1['symbol'],
                    ethereum_address=token1_deserialized,
                    name=swap_token1['name'],
                    decimals=int(swap_token1['decimals']),
                )
                try:
                    # V3 amounts are signed: a negative amount0 means
                    # token0 left the pool (the user received it)
                    if swap['amount0'].startswith('-'):
                        amount0_in = AssetAmount(FVal(ZERO))
                        amount0_out = deserialize_asset_amount_force_positive(swap['amount0'])
                        amount1_in = deserialize_asset_amount_force_positive(swap['amount1'])
                        amount1_out = AssetAmount(FVal(ZERO))
                    else:
                        amount0_in = deserialize_asset_amount_force_positive(swap['amount0'])
                        amount0_out = AssetAmount(FVal(ZERO))
                        amount1_in = AssetAmount(FVal(ZERO))
                        amount1_out = deserialize_asset_amount_force_positive(swap['amount1'])
                except ValueError as e:
                    log.error(
                        f'Failed to read amounts in Uniswap V3 swap {str(swap)}. '
                        f'{str(e)}.',
                    )
                    continue
                swaps.append(AMMSwap(
                    # V3 swap ids are '<tx hash>#<log index>'
                    tx_hash=swap['id'].split('#')[0],
                    log_index=int(swap['logIndex']),
                    address=address,
                    from_address=from_address_deserialized,
                    to_address=to_address_deserialized,
                    timestamp=Timestamp(int(timestamp)),
                    location=Location.UNISWAP,
                    token0=token0,
                    token1=token1,
                    amount0_in=amount0_in,
                    amount1_in=amount1_in,
                    amount0_out=amount0_out,
                    amount1_out=amount1_out,
                ))
            # with the new logic the list of swaps can be empty, in that case don't try
            # to make trades from the swaps
            if len(swaps) == 0:
                continue
            # Now that we got all swaps for a transaction, create the trade object
            trades.extend(self._tx_swaps_to_trades(swaps))
        # Check whether an extra request is needed
        if len(result_data) < GRAPH_QUERY_LIMIT:
            break
        # Update pagination step
        param_values = {
            **param_values,
            'offset': param_values['offset'] + GRAPH_QUERY_LIMIT,  # type: ignore
        }
    return trades