def test_get_transaction_receipt(database, ethereum_manager, call_order, transaction_already_queried):  # pylint: disable=unused-argument  # noqa: E501
    """Test that a transaction receipt can be fetched from the network, stored in
    the DB and then found again via a cached transaction query"""
    txs, expected_receipts = setup_ethereum_transactions_test(
        database=database,
        transaction_already_queried=transaction_already_queried,
        one_receipt_in_db=False,
    )
    hash_hex = '0x' + txs[0].tx_hash.hex()
    tx_module = EthTransactions(ethereum=ethereum_manager, database=database)

    # Receipt should be pulled from the network and match the expected fixture
    fetched_receipt = tx_module.get_or_query_transaction_receipt(hash_hex)
    assert fetched_receipt == expected_receipts[0]

    # The transaction itself must now be retrievable from the cache alone
    cached_txs, _ = tx_module.query(
        ETHTransactionsFilterQuery.make(tx_hash=hash_hex),
        only_cache=True,
    )
    assert len(cached_txs) == 1
    assert cached_txs[0] == txs[0]
def get_history(
        self,
        start_ts: Timestamp,
        end_ts: Timestamp,
        has_premium: bool,
) -> HistoryResult:
    """Creates trades and loans history from start_ts to end_ts

    Aggregates history from every connected exchange, ethereum transactions,
    externally-entered trades, offline ledger actions, AMM trades and the
    premium-gated defi modules. Progress is reported via
    self._increase_progress after each querying stage.

    Returns a tuple of:
    (error_string, history, loans, asset_movements, eth_transactions,
     defi_events, ledger_actions) -- see the return statement at the bottom.
    """
    self._reset_variables()
    step = 0
    # One progress step per connected exchange plus a fixed number of
    # non-exchange stages
    total_steps = len(
        self.exchange_manager.connected_exchanges
    ) + NUM_HISTORY_QUERY_STEPS_EXCL_EXCHANGES  # noqa: E501
    log.info(
        'Get/create trade history',
        start_ts=start_ts,
        end_ts=end_ts,
    )
    # start creating the all trades history list
    history: List[Union[Trade, MarginPosition, AMMTrade]] = []
    asset_movements = []
    ledger_actions = []
    loans = []
    # Accumulates newline-joined error messages from failed queries;
    # returned as the first element of the result tuple
    empty_or_error = ''

    def populate_history_cb(
            trades_history: List[Trade],
            margin_history: List[MarginPosition],
            result_asset_movements: List[AssetMovement],
            result_ledger_actions: List[LedgerAction],
            exchange_specific_data: Any,
    ) -> None:
        """This callback will run for successful exchange history query.

        Mutates the enclosing history/asset_movements/ledger_actions/loans
        lists via closure.
        """
        history.extend(trades_history)
        history.extend(margin_history)
        asset_movements.extend(result_asset_movements)
        ledger_actions.extend(result_ledger_actions)

        if exchange_specific_data:
            # This can only be poloniex at the moment
            polo_loans_data = exchange_specific_data
            loans.extend(
                process_polo_loans(
                    msg_aggregator=self.msg_aggregator,
                    data=polo_loans_data,
                    # We need to have history of loans since before the range
                    start_ts=Timestamp(0),
                    end_ts=end_ts,
                ))

    def fail_history_cb(error_msg: str) -> None:
        """This callback will run for failure in exchange history query"""
        nonlocal empty_or_error
        empty_or_error += '\n' + error_msg

    # Query each connected exchange; results arrive through the callbacks above
    for exchange in self.exchange_manager.iterate_exchanges():
        self.processing_state_name = f'Querying {exchange.name} exchange history'
        exchange.query_history_with_callbacks(
            # We need to have history of exchanges since before the range
            start_ts=Timestamp(0),
            end_ts=end_ts,
            success_callback=populate_history_cb,
            fail_callback=fail_history_cb,
        )
        step = self._increase_progress(step, total_steps)

    try:
        self.processing_state_name = 'Querying ethereum transactions history'
        filter_query = ETHTransactionsFilterQuery.make(
            order_ascending=True,  # for history processing we need oldest first
            limit=None,
            offset=None,
            addresses=None,
            # We need to have history of transactions since before the range
            from_ts=Timestamp(0),
            to_ts=end_ts,
        )
        ethtx_module = EthTransactions(ethereum=self.chain_manager.ethereum, database=self.db)
        eth_transactions, _ = ethtx_module.query(
            filter_query=filter_query,
            with_limit=False,  # at the moment ignore the limit for historical processing
            only_cache=False,
        )
    except RemoteError as e:
        # A failed transactions query is non-fatal: report it and continue
        # with an empty transaction list
        eth_transactions = []
        msg = str(e)
        self.msg_aggregator.add_error(
            f'There was an error when querying etherscan for ethereum transactions: {msg}'
            f'The final history result will not include ethereum transactions',
        )
        empty_or_error += '\n' + msg
    step = self._increase_progress(step, total_steps)

    # Include all external trades and trades from external exchanges
    for location in EXTERNAL_LOCATION:
        self.processing_state_name = f'Querying {location} trades history'
        external_trades = self.db.get_trades(
            filter_query=TradesFilterQuery.make(location=location),
            has_premium=True,  # we need all trades for accounting -- limit happens later
        )
        history.extend(external_trades)
    step = self._increase_progress(step, total_steps)

    # include the ledger actions from offline sources
    self.processing_state_name = 'Querying ledger actions history'
    offline_ledger_actions, _ = self.query_ledger_actions(
        filter_query=LedgerActionsFilterQuery.make(),
        only_cache=True,
    )
    # Deduplicate against actions already collected from exchanges.
    # NOTE(review): set difference does not preserve order -- presumably fine
    # since history is sorted by timestamp at the end; verify.
    unique_ledger_actions = list(
        set(offline_ledger_actions) - set(ledger_actions))
    ledger_actions.extend(unique_ledger_actions)
    step = self._increase_progress(step, total_steps)

    # include AMM trades: balancer, uniswap
    for amm_location in AMMTradeLocations:
        amm_module_name = cast(AMMTRADE_LOCATION_NAMES, str(amm_location))
        amm_module = self.chain_manager.get_module(amm_module_name)
        if has_premium and amm_module:
            self.processing_state_name = f'Querying {amm_module_name} trade history'
            amm_module_trades = amm_module.get_trades(
                addresses=self.chain_manager.queried_addresses_for_module(amm_module_name),
                from_timestamp=Timestamp(0),
                to_timestamp=end_ts,
                only_cache=False,
            )
            history.extend(amm_module_trades)
    # Single progress step for all AMM locations combined
    step = self._increase_progress(step, total_steps)

    # Include makerdao DSR gains
    defi_events = []
    makerdao_dsr = self.chain_manager.get_module('makerdao_dsr')
    if makerdao_dsr and has_premium:
        self.processing_state_name = 'Querying makerDAO DSR history'
        defi_events.extend(
            makerdao_dsr.get_history_events(
                from_timestamp=Timestamp(0),  # we need to process all events from history start
                to_timestamp=end_ts,
            ))
    step = self._increase_progress(step, total_steps)

    # Include makerdao vault events
    makerdao_vaults = self.chain_manager.get_module('makerdao_vaults')
    if makerdao_vaults and has_premium:
        self.processing_state_name = 'Querying makerDAO vaults history'
        defi_events.extend(
            makerdao_vaults.get_history_events(
                from_timestamp=Timestamp(0),  # we need to process all events from history start
                to_timestamp=end_ts,
            ))
    step = self._increase_progress(step, total_steps)

    # include yearn vault events
    yearn_vaults = self.chain_manager.get_module('yearn_vaults')
    if yearn_vaults and has_premium:
        self.processing_state_name = 'Querying yearn vaults history'
        defi_events.extend(
            yearn_vaults.get_history_events(
                from_timestamp=Timestamp(0),  # we need to process all events from history start
                to_timestamp=end_ts,
                addresses=self.chain_manager.queried_addresses_for_module('yearn_vaults'),
            ))
    step = self._increase_progress(step, total_steps)

    # include compound events
    compound = self.chain_manager.get_module('compound')
    if compound and has_premium:
        self.processing_state_name = 'Querying compound history'
        try:
            # we need to process all events from history start
            defi_events.extend(
                compound.get_history_events(
                    from_timestamp=Timestamp(0),
                    to_timestamp=end_ts,
                    addresses=self.chain_manager.queried_addresses_for_module('compound'),
                ))
        except RemoteError as e:
            # Subgraph outage is non-fatal; surface it to the user and move on
            self.msg_aggregator.add_error(
                SUBGRAPH_REMOTE_ERROR_MSG.format(protocol="Compound", error_msg=str(e)),
            )
    step = self._increase_progress(step, total_steps)

    # include adex events
    # NOTE(review): adex/aave/eth2/liquity below query from start_ts rather
    # than Timestamp(0) like the modules above -- confirm this asymmetry is
    # intentional
    adex = self.chain_manager.get_module('adex')
    if adex is not None and has_premium:
        self.processing_state_name = 'Querying adex staking history'
        defi_events.extend(
            adex.get_history_events(
                from_timestamp=start_ts,
                to_timestamp=end_ts,
                addresses=self.chain_manager.queried_addresses_for_module('adex'),
            ))
    step = self._increase_progress(step, total_steps)

    # include aave events
    aave = self.chain_manager.get_module('aave')
    if aave is not None and has_premium:
        self.processing_state_name = 'Querying aave history'
        defi_events.extend(
            aave.get_history_events(
                from_timestamp=start_ts,
                to_timestamp=end_ts,
                addresses=self.chain_manager.queried_addresses_for_module('aave'),
            ))
    step = self._increase_progress(step, total_steps)

    # include eth2 staking events
    eth2 = self.chain_manager.get_module('eth2')
    if eth2 is not None and has_premium:
        self.processing_state_name = 'Querying ETH2 staking history'
        try:
            # eth2 events come through the chain manager, not the module itself
            eth2_events = self.chain_manager.get_eth2_history_events(
                from_timestamp=start_ts,
                to_timestamp=end_ts,
            )
            defi_events.extend(eth2_events)
        except RemoteError as e:
            self.msg_aggregator.add_error(
                f'Eth2 events are not included in the PnL report due to {str(e)}',
            )
    step = self._increase_progress(step, total_steps)

    # include liquity events
    liquity = self.chain_manager.get_module('liquity')
    if liquity is not None and has_premium:
        self.processing_state_name = 'Querying Liquity staking history'
        defi_events.extend(
            liquity.get_history_events(
                from_timestamp=start_ts,
                to_timestamp=end_ts,
                addresses=self.chain_manager.queried_addresses_for_module('liquity'),
            ))
    # Final stage: return value of _increase_progress intentionally unused
    self._increase_progress(step, total_steps)

    # Chronological order is required by downstream history processing
    history.sort(key=action_get_timestamp)
    return (
        empty_or_error,
        history,
        loans,
        asset_movements,
        eth_transactions,
        defi_events,
        ledger_actions,
    )