def test_update_used_query_range(database):
    """Check that DBQueryRanges.update_used_query_range widens the stored range."""
    ranges_db = DBQueryRanges(database)
    loc_a, loc_b = 'location1', 'location2'
    database.update_used_query_range(loc_a, 15, 25)
    database.update_used_query_range(loc_b, 10, 125)

    # 15-25 combined with 12-90 should extend the stored range on both sides
    first, last = 12, 90
    missing = ranges_db.get_location_query_ranges(loc_a, first, last)
    ranges_db.update_used_query_range(
        loc_a,
        queried_ranges=[(first, last)] + missing,
    )
    assert database.get_used_query_range(loc_a) == (12, 90)

    # 10-125 combined with 250-500 should extend the stored range forward only
    first, last = 250, 500
    missing = ranges_db.get_location_query_ranges(loc_b, first, last)
    ranges_db.update_used_query_range(
        loc_b,
        queried_ranges=[(first, last)] + missing,
    )
    assert database.get_used_query_range(loc_b) == (10, 500)
def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[AssetMovement]:
    """Queries the local DB and the exchange for the deposits/withdrawal history of the user"""
    movements = self.db.get_asset_movements(
        from_ts=start_ts,
        to_ts=end_ts,
        location=deserialize_location(self.name),
    )
    location_string = f'{self.name}_asset_movements'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for sub-ranges we have not asked it about before
    fresh_movements: List[AssetMovement] = []
    for range_start, range_end in ranges_to_query:
        fresh_movements.extend(self.query_online_deposits_withdrawals(
            start_ts=range_start,
            end_ts=range_end,
        ))

    if fresh_movements:
        self.db.add_asset_movements(fresh_movements)
    # remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return movements + fresh_movements
def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[Trade]:
    """Queries the local DB and the remote exchange for the trade history of the user

    Limits the query to the given time range and also if only_cache is True
    returns only what is already saved in the DB without performing an
    exchange query.
    """
    log.debug(f'Querying trade history for {self.name} exchange')
    trades = self.db.get_trades(
        filter_query=TradesFilterQuery.make(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        ),
        has_premium=True,  # this is okay since the returned trades don't make it to the user
    )
    if only_cache:
        return trades

    location_string = f'{str(self.location)}_trades_{self.name}'
    ranges = DBQueryRanges(self.db)
    for query_start_ts, query_end_ts in ranges.get_location_query_ranges(
            location_string=location_string,
            start_ts=start_ts,
            end_ts=end_ts,
    ):
        # Only hit the exchange for time frames it has not been asked about before
        log.debug(
            f'Querying online trade history for {self.name} between '
            f'{query_start_ts} and {query_end_ts}',
        )
        new_trades, queried_range = self.query_online_trade_history(
            start_ts=query_start_ts,
            end_ts=query_end_ts,
        )
        if new_trades:
            self.db.add_trades(new_trades)
        # remember the range the exchange actually covered, which may differ
        # from the range we asked for
        ranges.update_used_query_range(
            location_string=location_string,
            start_ts=queried_range[0],
            end_ts=queried_range[1],
            ranges_to_query=[queried_range],
        )
        trades.extend(new_trades)

    return trades
def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[AssetMovement]:
    """Queries the local DB and the exchange for the deposits/withdrawal history of the user

    If only_cache is true only what is already cached in the DB is returned
    without an actual exchange query.
    """
    log.debug(f'Querying deposits/withdrawals history for {self.name} exchange')
    movements = self.db.get_asset_movements(
        filter_query=AssetMovementsFilterQuery.make(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        ),
        has_premium=True,  # this is okay since the returned trades don't make it to the user
    )
    if only_cache:
        return movements

    location_string = f'{str(self.location)}_asset_movements_{self.name}'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for sub-ranges we have not asked it about before
    fresh_movements: List[AssetMovement] = []
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(
            f'Querying online deposits/withdrawals for {self.name} between '
            f'{query_start_ts} and {query_end_ts}',
        )
        fresh_movements.extend(self.query_online_deposits_withdrawals(
            start_ts=query_start_ts,
            end_ts=query_end_ts,
        ))

    if fresh_movements:
        self.db.add_asset_movements(fresh_movements)
    # remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return movements + fresh_movements
def query_grant_history(
        self,
        grant_id: Optional[int],
        from_ts: Optional[Timestamp] = None,
        to_ts: Optional[Timestamp] = None,
        only_cache: bool = False,
) -> Dict[int, Dict[str, Any]]:
    """Query the gitcoin events of a grant, store them as ledger actions and return
    the DB view of the grant history.

    If only_cache is True only what is already in the DB is returned. Otherwise the
    gitcoin API is queried for the time ranges not yet covered.

    May raise:
    - RemoteError if there is an error querying the gitcoin API
    - InputError if only_cache is False and grant_id is missing
    """
    if only_cache:
        return self.get_history_from_db(
            grant_id=grant_id,
            from_ts=from_ts,
            to_ts=to_ts,
        )

    if grant_id is None:
        raise InputError(
            'Attempted to query gitcoin events from the api without specifying a grant id',
        )

    entry_name = f'{GITCOIN_GRANTS_PREFIX}_{grant_id}'
    dbranges = DBQueryRanges(self.db)
    # fall back to the gitcoin launch timestamp / the current time when no
    # explicit bounds are given
    from_timestamp = GITCOIN_START_TS if from_ts is None else from_ts
    to_timestamp = ts_now() if to_ts is None else to_ts
    ranges = dbranges.get_location_query_ranges(
        location_string=entry_name,
        start_ts=from_timestamp,
        end_ts=to_timestamp,
    )
    # grant_created_on is discovered by the first period query and then passed
    # back in so subsequent periods can reuse it
    grant_created_on: Optional[Timestamp] = None
    for period_range in ranges:
        actions, grant_created_on = self.query_grant_history_period(
            grant_id=grant_id,
            grant_created_on=grant_created_on,
            from_timestamp=period_range[0],
            to_timestamp=period_range[1],
        )
        self.db_ledger.add_ledger_actions(actions)

    dbranges.update_used_query_range(
        location_string=entry_name,
        start_ts=from_timestamp,
        end_ts=to_timestamp,
        ranges_to_query=ranges,
    )
    return self.get_history_from_db(
        grant_id=grant_id,
        from_ts=from_ts,
        to_ts=to_ts,
    )
def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[Trade]:
    """Queries the local DB and the remote exchange for the trade history of the user

    Limits the query to the given time range and also if only_cache is True
    returns only what is already saved in the DB without performing an
    exchange query.
    """
    trades = self.db.get_trades(
        from_ts=start_ts,
        to_ts=end_ts,
        location=deserialize_location(self.name),
    )
    if only_cache:
        return trades

    location_string = f'{self.name}_trades'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for the time frames we have not asked it about before
    fresh_trades: List[Trade] = []
    for range_start, range_end in ranges_to_query:
        fresh_trades.extend(self.query_online_trade_history(
            start_ts=range_start,
            end_ts=range_end,
        ))

    if fresh_trades:
        self.db.add_trades(fresh_trades)
    # and also remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return trades + fresh_trades
def query_income_loss_expense(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[LedgerAction]:
    """Queries the local DB and the exchange for the income/loss/expense history of the user

    If only_cache is true only what is already cached in the DB is returned
    without an actual exchange query.
    """
    actions_db = DBLedgerActions(self.db, self.db.msg_aggregator)
    # has_premium True is fine here since the result of this is not user facing atm
    ledger_actions = actions_db.get_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(
            from_ts=start_ts,
            to_ts=end_ts,
            location=self.location,
        ),
        has_premium=True,
    )
    if only_cache:
        return ledger_actions

    location_string = f'{str(self.location)}_ledger_actions_{self.name}'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for sub-ranges we have not asked it about before
    fresh_actions: List[LedgerAction] = []
    for range_start, range_end in ranges_to_query:
        fresh_actions.extend(self.query_online_income_loss_expense(
            start_ts=range_start,
            end_ts=range_end,
        ))

    if fresh_actions:
        actions_db.add_ledger_actions(fresh_actions)
    # remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return ledger_actions + fresh_actions
def query_trade_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[Trade]:
    """Queries the local DB and the remote exchange for the trade history of the user"""
    trades = self.db.get_trades(
        from_ts=start_ts,
        to_ts=end_ts,
        location=deserialize_location(self.name),
    )
    location_string = f'{self.name}_trades'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for the time frames we have not asked it about before
    fresh_trades: List[Trade] = []
    for range_start, range_end in ranges_to_query:
        try:
            fresh_trades.extend(self.query_online_trade_history(
                start_ts=range_start,
                end_ts=range_end,
            ))
        except NotImplementedError:
            msg = 'query_online_trade_history should only not be implemented by bitmex'
            assert self.name == 'bitmex', msg

    if fresh_trades:
        self.db.add_trades(fresh_trades)
    # and also remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return trades + fresh_trades
def query_margin_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[MarginPosition]:
    """Queries the local DB and the remote exchange for the margin positions
    history of the user
    """
    log.debug(f'Querying margin history for {self.name} exchange')
    margin_positions = self.db.get_margin_positions(
        from_ts=start_ts,
        to_ts=end_ts,
        location=self.location,
    )
    location_string = f'{str(self.location)}_margins_{self.name}'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for sub-ranges we have not asked it about before
    fresh_positions: List[MarginPosition] = []
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(
            f'Querying online margin history for {self.name} between '
            f'{query_start_ts} and {query_end_ts}',
        )
        fresh_positions.extend(self.query_online_margin_history(
            start_ts=query_start_ts,
            end_ts=query_end_ts,
        ))

    if fresh_positions:
        self.db.add_margin_positions(fresh_positions)
    # and also remember the last queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return margin_positions + fresh_positions
def query_deposits_withdrawals(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        only_cache: bool,
) -> List[AssetMovement]:
    """Queries the local DB and the exchange for the deposits/withdrawal history of the user

    If only_cache is true only what is already cached in the DB is returned
    without an actual exchange query.
    """
    movements = self.db.get_asset_movements(
        from_ts=start_ts,
        to_ts=end_ts,
        location=self.location,
    )
    if only_cache:
        return movements

    location_string = f'{str(self.location)}_asset_movements'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # Only hit the exchange for sub-ranges we have not asked it about before
    fresh_movements: List[AssetMovement] = []
    for range_start, range_end in ranges_to_query:
        fresh_movements.extend(self.query_online_deposits_withdrawals(
            start_ts=range_start,
            end_ts=range_end,
        ))

    if fresh_movements:
        self.db.add_asset_movements(fresh_movements)
    # remember the queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return movements + fresh_movements
def query_margin_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> List[MarginPosition]:
    """Queries the local DB and the remote exchange for the margin positions
    history of the user
    """
    margin_positions = self.db.get_margin_positions(
        from_ts=start_ts,
        to_ts=end_ts,
        location=self.name,
    )
    location_string = f'{self.name}_margins'
    ranges = DBQueryRanges(self.db)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    fresh_positions: List[MarginPosition] = []
    for range_start, range_end in ranges_to_query:
        try:
            fresh_positions.extend(self.query_online_margin_history(
                start_ts=range_start,
                end_ts=range_end,
            ))
        except NotImplementedError:
            # exchanges without margin trading simply contribute nothing
            pass

    if fresh_positions:
        self.db.add_margin_positions(fresh_positions)
    # and also remember the last queried timestamp range for the exchange
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return margin_positions + fresh_positions
def single_address_query_transactions(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Only queries new transactions and adds them to the DB"""
    location_string = f'ethtxs_{address}'
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    dbethtx = DBEthTx(self.database)
    new_transactions = []
    for query_start_ts, query_end_ts in ranges_to_query:
        try:
            new_transactions.extend(self.ethereum.etherscan.get_transactions(
                account=address,
                from_ts=query_start_ts,
                to_ts=query_end_ts,
            ))
        except RemoteError as e:
            # a failed sub-range is reported but does not abort the other sub-ranges
            self.ethereum.msg_aggregator.add_error(
                f'Got error "{str(e)}" while querying ethereum transactions '
                f'from Etherscan. Transactions not added to the DB '
                f'from_ts: {query_start_ts} '
                f'to_ts: {query_end_ts} ',
            )

    if new_transactions:
        dbethtx.add_ethereum_transactions(new_transactions)
    # and also remember the last queried timestamps for the address
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )
def test_update_used_query_range(database):
    """Check the legacy start_ts/end_ts + ranges_to_query form of update_used_query_range."""
    ranges_db = DBQueryRanges(database)
    loc_a, loc_b = 'location1', 'location2'
    database.update_used_query_range(loc_a, 15, 25)
    database.update_used_query_range(loc_b, 10, 125)

    # A no-op update must leave the stored range untouched
    ranges_db.update_used_query_range(loc_a, 0, 10, [])
    msg = 'empty used query range should do nothing'
    assert database.get_used_query_range(loc_a) == (15, 25), msg

    # 15-25 combined with 12-90 should extend the stored range on both sides
    first, last = 12, 90
    missing = ranges_db.get_location_query_ranges(loc_a, first, last)
    ranges_db.update_used_query_range(
        loc_a,
        start_ts=first,
        end_ts=last,
        ranges_to_query=missing,
    )
    assert database.get_used_query_range(loc_a) == (12, 90)

    # 10-125 combined with 250-500 should extend the stored range forward only
    first, last = 250, 500
    missing = ranges_db.get_location_query_ranges(loc_b, first, last)
    ranges_db.update_used_query_range(
        loc_b,
        start_ts=first,
        end_ts=last,
        ranges_to_query=missing,
    )
    assert database.get_used_query_range(loc_b) == (10, 500)
def _get_transactions_for_range(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Queries etherscan for all ethereum transactions of address in the given ranges.

    If any transactions are found, they are added in the DB in batches as they
    stream in, and the used query range for the address is advanced after each
    saved batch so an interrupted query can resume where it stopped.
    """
    location_string = f'{RANGE_PREFIX_ETHTX}_{address}'
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    dbethtx = DBEthTx(self.database)
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(f'Querying Transactions for {address} -> {query_start_ts} - {query_end_ts}')
        try:
            # get_transactions yields transaction batches as they are fetched
            for new_transactions in self.ethereum.etherscan.get_transactions(
                account=address,
                from_ts=query_start_ts,
                to_ts=query_end_ts,
                action='txlist',
            ):
                # add new transactions to the DB
                if len(new_transactions) != 0:
                    dbethtx.add_ethereum_transactions(
                        ethereum_transactions=new_transactions,
                        relevant_address=address,
                    )
                    # advance the saved range only up to the newest saved tx, so
                    # nothing is marked queried before it is actually in the DB
                    ranges.update_used_query_range(  # update last queried time for the address
                        location_string=location_string,
                        queried_ranges=[(query_start_ts, new_transactions[-1].timestamp)],
                    )
                    # notify any websocket listeners about query progress
                    self.msg_aggregator.add_message(
                        message_type=WSMessageType.ETHEREUM_TRANSACTION_STATUS,
                        data={
                            'address': address,
                            'period': [query_start_ts, new_transactions[-1].timestamp],
                            'status': str(TransactionStatusStep.QUERYING_TRANSACTIONS),
                        },
                    )
        except RemoteError as e:
            self.ethereum.msg_aggregator.add_error(
                f'Got error "{str(e)}" while querying ethereum transactions '
                f'from Etherscan. Some transactions not added to the DB '
                f'address: {address} '
                f'from_ts: {query_start_ts} '
                f'to_ts: {query_end_ts} ',
            )
            # abort: the full-range update below must not run after a failure
            return

    log.debug(f'Transactions done for {address}. Update range {start_ts} - {end_ts}')
    ranges.update_used_query_range(  # entire range is now considered queried
        location_string=location_string,
        queried_ranges=[(start_ts, end_ts)],
    )
def _single_address_query_transactions(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        with_limit: bool,
) -> List[EthereumTransaction]:
    """Query one address' transactions, save any new ones and return the DB view.

    When with_limit is True the free-tier transaction limit is applied across
    all addresses queried so far.
    """
    self.tx_per_address[address] = 0
    transactions = self.database.get_ethereum_transactions(
        from_ts=start_ts,
        to_ts=end_ts,
        address=address,
    )
    location_string = f'ethtxs_{address}'
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    new_transactions = []
    for query_start_ts, query_end_ts in ranges_to_query:
        for internal in (False, True):  # both normal and internal transactions
            try:
                new_transactions.extend(self.etherscan.get_transactions(
                    account=address,
                    internal=internal,
                    from_ts=query_start_ts,
                    to_ts=query_end_ts,
                ))
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying ethereum transactions '
                    f'from Etherscan. Transactions not added to the DB '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} '
                    f'internal: {internal}',
                )

    if new_transactions:
        self.database.add_ethereum_transactions(new_transactions, from_etherscan=True)
        # The DB write can still adjust internal transactions (increasingly
        # negative nonce), so re-read the whole batch to pick up those updates
        transactions = self.database.get_ethereum_transactions(
            from_ts=start_ts,
            to_ts=end_ts,
            address=address,
        )
    # and also remember the last queried timestamps for the address
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    if with_limit:
        already_queried = sum(self.tx_per_address.values())
        remaining = FREE_ETH_TX_LIMIT - already_queried
        returning = min(remaining, len(transactions))
        # Note down how many we got for this address
        self.tx_per_address[address] = returning
        return transactions[:returning]

    return transactions
def _get_internal_transactions_for_ranges(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Queries etherscan for all internal transactions of address in the given ranges.

    If any internal transactions are found, they are added in the DB. Each internal
    transaction's parent transaction is fetched and saved first if it is not already
    in the DB, and the used query range is advanced incrementally so an interrupted
    query can resume where it stopped.
    """
    location_string = f'{RANGE_PREFIX_ETHINTERNALTX}_{address}'
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    dbethtx = DBEthTx(self.database)
    # NOTE(review): this initialization is immediately shadowed by the inner
    # `for new_internal_txs in ...` loop variable below — appears redundant
    new_internal_txs = []
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(f'Querying Internal Transactions for {address} -> {query_start_ts} - {query_end_ts}')  # noqa: E501
        try:
            # get_transactions yields internal-transaction batches as they are fetched
            for new_internal_txs in self.ethereum.etherscan.get_transactions(
                account=address,
                from_ts=query_start_ts,
                to_ts=query_end_ts,
                action='txlistinternal',
            ):
                if len(new_internal_txs) != 0:
                    for internal_tx in new_internal_txs:
                        # make sure all internal transaction parent transactions are in the DB
                        gevent.sleep(0)  # yield to other greenlets during the long loop
                        result = dbethtx.get_ethereum_transactions(
                            ETHTransactionsFilterQuery.make(tx_hash=internal_tx.parent_tx_hash),  # noqa: E501
                            has_premium=True,  # ignore limiting here
                        )
                        if len(result) == 0:  # parent transaction is not in the DB. Get it
                            transaction = self.ethereum.get_transaction_by_hash(internal_tx.parent_tx_hash)  # noqa: E501
                            gevent.sleep(0)
                            dbethtx.add_ethereum_transactions(
                                ethereum_transactions=[transaction],
                                relevant_address=address,
                            )
                            timestamp = transaction.timestamp
                        else:
                            timestamp = result[0].timestamp

                        dbethtx.add_ethereum_internal_transactions(
                            transactions=[internal_tx],
                            relevant_address=address,
                        )
                        log.debug(f'Internal Transactions for {address} -> update range {query_start_ts} - {timestamp}')  # noqa: E501
                        # advance the saved range only up to the parent tx timestamp
                        ranges.update_used_query_range(  # update last queried time for address
                            location_string=location_string,
                            queried_ranges=[(query_start_ts, timestamp)],
                        )
                        # notify any websocket listeners about query progress
                        self.msg_aggregator.add_message(
                            message_type=WSMessageType.ETHEREUM_TRANSACTION_STATUS,
                            data={
                                'address': address,
                                'period': [query_start_ts, timestamp],
                                'status': str(TransactionStatusStep.QUERYING_INTERNAL_TRANSACTIONS),  # noqa: E501
                            },
                        )
        except RemoteError as e:
            self.ethereum.msg_aggregator.add_error(
                f'Got error "{str(e)}" while querying internal ethereum transactions '
                f'from Etherscan. Transactions not added to the DB '
                f'address: {address} '
                f'from_ts: {query_start_ts} '
                f'to_ts: {query_end_ts} ',
            )
            # abort: the full-range update below must not run after a failure
            return

    log.debug(f'Internal Transactions for address {address} done. Update range {start_ts} - {end_ts}')  # noqa: E501
    ranges.update_used_query_range(  # entire range is now considered queried
        location_string=location_string,
        queried_ranges=[(start_ts, end_ts)],
    )
def single_address_query_transactions(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
        with_limit: bool,
        only_cache: bool,
) -> List[EthereumTransaction]:
    """Query one address' transactions, save any new ones and return the
    (possibly limited) DB view.

    If only_cache is True no remote query is performed and only the DB data
    is returned.
    """
    self.tx_per_address[address] = 0
    transactions = self.database.get_ethereum_transactions(
        from_ts=start_ts,
        to_ts=end_ts,
        address=address,
    )
    if only_cache:
        return self._return_transactions_maybe_limit(
            address=address,
            transactions=transactions,
            with_limit=with_limit,
        )

    location_string = f'ethtxs_{address}'
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    new_transactions = []
    for query_start_ts, query_end_ts in ranges_to_query:
        for internal in (False, True):  # both normal and internal transactions
            try:
                new_transactions.extend(self.etherscan.get_transactions(
                    account=address,
                    internal=internal,
                    from_ts=query_start_ts,
                    to_ts=query_end_ts,
                ))
            except RemoteError as e:
                self.msg_aggregator.add_error(
                    f'Got error "{str(e)}" while querying ethereum transactions '
                    f'from Etherscan. Transactions not added to the DB '
                    f'from_ts: {query_start_ts} '
                    f'to_ts: {query_end_ts} '
                    f'internal: {internal}',
                )

    if new_transactions:
        self.database.add_ethereum_transactions(new_transactions, from_etherscan=True)
        # The DB write can still adjust internal transactions (increasingly
        # negative nonce), so re-read the whole batch to pick up those updates
        transactions = self.database.get_ethereum_transactions(
            from_ts=start_ts,
            to_ts=end_ts,
            address=address,
        )
    # and also remember the last queried timestamps for the address
    ranges.update_used_query_range(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
        ranges_to_query=ranges_to_query,
    )

    return self._return_transactions_maybe_limit(
        address=address,
        transactions=transactions,
        with_limit=with_limit,
    )
def _get_erc20_transfers_for_ranges(
        self,
        address: ChecksumEthAddress,
        start_ts: Timestamp,
        end_ts: Timestamp,
) -> None:
    """Queries etherscan for all erc20 transfers of address in the given ranges.

    If any transfers are found, their (parent) transactions are added in the DB.
    The used query range is advanced incrementally after each saved transaction,
    so an interrupted query can resume where it stopped.

    Bug fix: the RemoteError message previously concatenated
    '...token transactions' + 'from Etherscan...' without a separating space,
    producing 'transactionsfrom Etherscan'.
    """
    location_string = f'{RANGE_PREFIX_ETHTOKENTX}_{address}'
    dbethtx = DBEthTx(self.database)
    ranges = DBQueryRanges(self.database)
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=location_string,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(f'Querying ERC20 Transfers for {address} -> {query_start_ts} - {query_end_ts}')  # noqa: E501
        try:
            # get_token_transaction_hashes yields batches of tx hashes as fetched
            for erc20_tx_hashes in self.ethereum.etherscan.get_token_transaction_hashes(
                account=address,
                from_ts=query_start_ts,
                to_ts=query_end_ts,
            ):
                for tx_hash in erc20_tx_hashes:
                    tx_hash_bytes = deserialize_evm_tx_hash(tx_hash)
                    result = dbethtx.get_ethereum_transactions(
                        ETHTransactionsFilterQuery.make(tx_hash=tx_hash_bytes),
                        has_premium=True,  # ignore limiting here
                    )
                    if len(result) == 0:  # if transaction is not there add it
                        gevent.sleep(0)  # yield to other greenlets during the long loop
                        transaction = self.ethereum.get_transaction_by_hash(tx_hash_bytes)
                        dbethtx.add_ethereum_transactions(
                            [transaction],
                            relevant_address=address,
                        )
                        timestamp = transaction.timestamp
                    else:
                        timestamp = result[0].timestamp

                    log.debug(f'ERC20 Transfers for {address} -> update range {query_start_ts} - {timestamp}')  # noqa: E501
                    ranges.update_used_query_range(  # update last queried time for the address
                        location_string=location_string,
                        queried_ranges=[(query_start_ts, timestamp)],
                    )
                    # notify any websocket listeners about query progress
                    self.msg_aggregator.add_message(
                        message_type=WSMessageType.ETHEREUM_TRANSACTION_STATUS,
                        data={
                            'address': address,
                            'period': [query_start_ts, timestamp],
                            'status': str(TransactionStatusStep.QUERYING_ETHEREUM_TOKENS_TRANSACTIONS),  # noqa: E501
                        },
                    )
        except RemoteError as e:
            # NOTE(review): unlike the plain/internal tx queries this does not
            # return on error, so the full range below is still marked queried —
            # confirm this is intended
            self.ethereum.msg_aggregator.add_error(
                f'Got error "{str(e)}" while querying token transactions '
                f'from Etherscan. Transactions not added to the DB '
                f'address: {address} '
                f'from_ts: {query_start_ts} '
                f'to_ts: {query_end_ts} ',
            )

    log.debug(f'ERC20 Transfers done for address {address}. Update range {start_ts} - {end_ts}')  # noqa: E501
    ranges.update_used_query_range(  # entire range is now considered queried
        location_string=location_string,
        queried_ranges=[(start_ts, end_ts)],
    )
def query_kraken_ledgers(self, start_ts: Timestamp, end_ts: Timestamp) -> bool:
    """
    Query Kraken's ledger to retrieve events and transform them to our internal
    representation of history events. Internally we look for the query range
    that needs to be queried in the range (start_ts, end_ts) to avoid double
    querying the kraken API when this method is called for deposits/withdrawals
    and trades. The events queried are then stored in the database.

    Returns true if any query to the kraken API was not successful
    """
    ranges = DBQueryRanges(self.db)
    range_query_name = f'{self.location}_history_events_{self.name}'
    ranges_to_query = ranges.get_location_query_ranges(
        location_string=range_query_name,
        start_ts=start_ts,
        end_ts=end_ts,
    )
    with_errors = False
    for query_start_ts, query_end_ts in ranges_to_query:
        log.debug(f'Querying kraken ledger entries from {query_start_ts} to {query_end_ts}')
        try:
            response, with_errors = self.query_until_finished(
                endpoint='Ledgers',
                keyname='ledger',
                start_ts=query_start_ts,
                end_ts=query_end_ts,
                extra_dict={},
            )
        except RemoteError as e:
            # a hard remote failure aborts the whole query immediately
            self.msg_aggregator.add_error(
                f'Failed to query kraken ledger between {query_start_ts} and '
                f'{query_end_ts}. {str(e)}',
            )
            return True

        # Group related events: kraken links the raw entries of one logical
        # event (e.g. a trade's two legs) through a shared refid
        raw_events_groupped = defaultdict(list)
        for raw_event in response:
            raw_events_groupped[raw_event['refid']].append(raw_event)

        new_events = []
        for events in raw_events_groupped.values():
            try:
                # order each group chronologically before transforming it
                events = sorted(
                    events,
                    key=lambda x: deserialize_fval(x['time'], 'time', 'kraken ledgers') * 1000,
                )
            except DeserializationError as e:
                self.msg_aggregator.add_error(
                    f'Failed to read timestamp in kraken event group '
                    f'due to {str(e)}. For more information read the logs. '
                    f'Skipping event',
                )
                log.error(f'Failed to read timestamp for {events}')
                continue
            group_events, found_unknown_event = history_event_from_kraken(
                events=events,
                name=self.name,
                msg_aggregator=self.msg_aggregator,
            )
            if found_unknown_event:
                # demote the whole group to informational if any member could
                # not be understood
                for event in group_events:
                    event.event_type = HistoryEventType.INFORMATIONAL
            new_events.extend(group_events)

        if len(new_events) != 0:
            try:
                self.history_events_db.add_history_events(new_events)
            except InputError as e:
                self.msg_aggregator.add_error(
                    f'Failed to save kraken events from {query_start_ts} to {query_end_ts} '
                    f'in database. {str(e)}',
                )

        ranges.update_used_query_range(
            location_string=range_query_name,
            queried_ranges=[(start_ts, end_ts)] + ranges_to_query,
        )
    if with_errors is True:
        return True  # we had errors so stop any further queries and quit
    return False  # no errors
def _get_trades(
        self,
        addresses: List[ChecksumEthAddress],
        from_timestamp: Timestamp,
        to_timestamp: Timestamp,
        only_cache: bool,
) -> AddressTrades:
    """Request via graph all trades for new addresses and the latest ones
    for already existing addresses. Then the requested trade are written in
    DB and finally all DB trades are read and returned.

    If only_cache is True, only the trades already present in the DB are returned.
    """
    address_amm_trades: AddressTrades = {}
    new_addresses: List[ChecksumEthAddress] = []
    existing_addresses: List[ChecksumEthAddress] = []
    # earliest end of any already-queried range among the existing addresses;
    # querying from there forward covers the gap for all of them
    min_end_ts: Timestamp = to_timestamp

    if only_cache:
        return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp)

    dbranges = DBQueryRanges(self.database)
    # Get addresses' last used query range for this AMM's trades
    for address in addresses:
        entry_name = f'{self.trades_prefix}_{address}'
        trades_range = self.database.get_used_query_range(name=entry_name)

        if not trades_range:
            new_addresses.append(address)
        else:
            existing_addresses.append(address)
            min_end_ts = min(min_end_ts, trades_range[1])

    # Request new addresses' trades (full history from timestamp 0)
    if new_addresses:
        start_ts = Timestamp(0)
        new_address_trades = self._get_trades_graph(
            addresses=new_addresses,
            start_ts=start_ts,
            end_ts=to_timestamp,
        )
        address_amm_trades.update(new_address_trades)

        # Insert last used query range for new addresses
        for address in new_addresses:
            entry_name = f'{self.trades_prefix}_{address}'
            dbranges.update_used_query_range(
                location_string=entry_name,
                queried_ranges=[(start_ts, to_timestamp)],
            )

    # Request existing DB addresses' trades (only the not-yet-covered tail)
    if existing_addresses and to_timestamp > min_end_ts:
        address_new_trades = self._get_trades_graph(
            addresses=existing_addresses,
            start_ts=min_end_ts,
            end_ts=to_timestamp,
        )
        address_amm_trades.update(address_new_trades)

        # Update last used query range for existing addresses
        for address in existing_addresses:
            entry_name = f'{self.trades_prefix}_{address}'
            dbranges.update_used_query_range(
                location_string=entry_name,
                queried_ranges=[(min_end_ts, to_timestamp)],
            )

    # Insert all unique swaps to the DB
    all_swaps = set()
    for address in filter(lambda x: x in address_amm_trades, addresses):
        for trade in address_amm_trades[address]:
            for swap in trade.swaps:
                all_swaps.add(swap)

    self.database.add_amm_swaps(list(all_swaps))
    return self._fetch_trades_from_db(addresses, from_timestamp, to_timestamp)