def test_balances_caching_mixup(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Test that querying the balances in a specific order does not mix up the caches.

    This tests for the problem seen where the bitcoin balances being empty
    and queried first returned an empty result for the ethereum balances.

    Fix over previous version: a duplicated per-account RDN assertion was removed.
    """
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=None,
        eth_balances=['1000000000000000000'],
        token_balances={A_RDN: ['2000000000000000000']},
        original_queries=['zerion'],
    )

    # Test all balances request by requesting to not save the data.
    # BTC is queried first on purpose: it has no accounts and used to
    # poison the ETH cache with an empty result.
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response_btc = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='BTC',
        ), json={'async_query': True})
        response_eth = requests.get(api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain='ETH',
        ), json={'async_query': True})
        task_id_btc = assert_ok_async_response(response_btc)
        task_id_eth = assert_ok_async_response(response_eth)
        result_btc = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            task_id_btc,
        )
        result_eth = wait_for_async_task_with_result(
            server=rotkehlchen_api_server,
            task_id=task_id_eth,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        # ETH results must be intact despite the earlier empty BTC query
        eth_assets = result_eth['per_account']['ETH'][ethereum_accounts[0]]['assets']
        assert eth_assets['ETH']['amount'] == '1'
        assert eth_assets[A_RDN.identifier]['amount'] == '2'
        assert result_eth['totals']['assets']['ETH']['amount'] == '1'
        assert result_eth['totals']['assets'][A_RDN.identifier]['amount'] == '2'
        # The BTC query has no accounts so everything must be empty
        assert result_btc['per_account'] == {}
        assert result_btc['totals']['assets'] == {}
        assert result_btc['totals']['liabilities'] == {}
def test_query_historical_dsr(
        rotkehlchen_api_server,
        ethereum_accounts,
        inquirer,  # pylint: disable=unused-argument
):
    """Check that DSR history is correctly reported by the API.

    This (and the async version) is a very hard to maintain test due to
    mocking everything.

    TODO: Perhaps change it to querying etherscan/chain until a given block
    for a given DSR account and check that until then all data match.
    """
    use_async = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_tests_for_dsr(
        etherscan=rotki.etherscan,
        accounts=ethereum_accounts,
        original_requests_get=requests.get,
    )
    with setup.etherscan_patch:
        response = requests.get(
            api_url_for(rotkehlchen_api_server, "makerdaodsrhistoryresource"),
            json={'async_query': use_async},
        )
        if not use_async:
            outcome = assert_proper_response_with_result(response)
        else:
            # Wait for the task while the etherscan patch is still active
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server,
                assert_ok_async_response(response),
            )

    assert_dsr_history_result_is_correct(outcome, setup)
def test_query_current_dsr_balance(
        rotkehlchen_api_server,
        ethereum_accounts,
):
    """Query the current DSR balance endpoint, randomly sync or async."""
    use_async = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_tests_for_dsr(
        etherscan=rotki.etherscan,
        accounts=ethereum_accounts,
        original_requests_get=requests.get,
    )
    with setup.etherscan_patch:
        response = requests.get(
            api_url_for(rotkehlchen_api_server, "makerdaodsrbalanceresource"),
            json={'async_query': use_async},
        )
        if not use_async:
            outcome = assert_proper_response_with_result(response)
        else:
            # Wait for the task while the etherscan patch is still active
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server,
                assert_ok_async_response(response),
            )

    assert_dsr_current_result_is_correct(outcome, setup)
def test_get_current_assets_price_in_btc(rotkehlchen_api_server):
    """Query current prices of BTC, USD and GBP with BTC as target asset."""
    use_async = random.choice([False, True])
    payload = {
        'assets': ['BTC', 'USD', 'GBP'],
        'target_asset': 'BTC',
        'async_query': use_async,
    }
    response = requests.get(
        api_url_for(rotkehlchen_api_server, "currentassetspriceresource"),
        json=payload,
    )
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )

    # Result must carry exactly the 'assets' and 'target_asset' keys
    assert len(result) == 2
    assert result['assets']['BTC'] == '1'
    assert result['assets']['GBP'] == '0.00004119457641910343485018976024'
    assert result['assets']['USD'] == '0.00003013502298398202988309419184'
    assert result['target_asset'] == 'BTC'
def test_get_current_assets_price_in_usd(rotkehlchen_api_server):
    """Query current prices of BTC, USD and GBP with USD as target asset."""
    use_async = random.choice([False, True])
    payload = {
        'assets': ['BTC', 'USD', 'GBP'],
        'target_asset': 'USD',
        'async_query': use_async,
    }
    response = requests.get(
        api_url_for(rotkehlchen_api_server, "currentassetspriceresource"),
        json=payload,
    )
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )

    # Result must carry exactly the 'assets' and 'target_asset' keys
    assert len(result) == 2
    assert result['assets']['BTC'] == '33183.98'
    assert result['assets']['GBP'] == '1.367'
    assert result['assets']['USD'] == '1'
    assert result['target_asset'] == 'USD'
def test_eth2_add_eth1_account(rotkehlchen_api_server):
    """This test uses real data and tests that adding an ETH1 address with
    ETH2 deposits properly detects validators.

    Fix over previous version: the result of the account-addition request was
    assigned and then immediately overwritten without ever being read; the
    dead assignments are removed (we only need the request to complete).
    """
    new_account = '0xa966B0eabCD717fa28Bd165F1cE160E7057FA369'
    async_query = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    setup = setup_balances(
        rotki,
        ethereum_accounts=[new_account],
        btc_accounts=[],
        original_queries=['logs', 'transactions', 'blocknobytime', 'beaconchain'],
    )
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        data = {'accounts': [{'address': new_account}], 'async_query': async_query}
        response = requests.put(api_url_for(
            rotkehlchen_api_server,
            'blockchainsaccountsresource',
            blockchain='ETH',
        ), json=data)
        # We only need the addition to complete; its result is not inspected
        if async_query:
            task_id = assert_ok_async_response(response)
            wait_for_async_task_with_result(
                rotkehlchen_api_server,
                task_id,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 4,
            )
        else:
            assert_proper_response_with_result(response)

        # now get all detected validators
        response = requests.get(
            api_url_for(rotkehlchen_api_server, 'eth2validatorsresource'),
        )
        result = assert_proper_response_with_result(response)
        # That address has only 1 validator. If that changes in the future this
        # test will fail and we will need to adjust the test
        validator_pubkey = '0x800199f8f3af15a22c42ccd7185948870eceeba2d06199ea30e7e28eb976a69284e393ba2f401e8983d011534b303a57'  # noqa: E501
        assert len(result['entries']) == 1
        assert result['entries'][0] == {
            'validator_index': 227858,
            'public_key': validator_pubkey,
            'ownership_percentage': '100.00',
        }
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            'blockchainbalancesresource',
        ), json={'blockchain': 'eth2'})
        result = assert_proper_response_with_result(response)
        # Balance amounts come from real chain data, so only lower bounds
        # are asserted
        per_acc = result['per_account']
        assert FVal(per_acc['ETH'][new_account]['assets']['ETH']['amount']) > ZERO
        assert FVal(per_acc['ETH2'][validator_pubkey]['assets']['ETH2']['amount']) > FVal('32.54')
        totals = result['totals']['assets']
        assert FVal(totals['ETH']['amount']) > ZERO
        assert FVal(totals['ETH2']['amount']) > FVal('32.54')
def test_query_vaults(rotkehlchen_api_server, ethereum_accounts):
    """Check querying the vaults endpoint works. Uses real vault data"""
    use_async = random.choice([False, True])
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    mock_proxies(
        rotki,
        {ethereum_accounts[0]: '0x689D4C2229717f877A644A0aAd742D67E5D0a2FB'},
        'makerdao_vaults',
    )

    def fetch(resource_name):
        # Issue the request and resolve it either synchronously or via the
        # async task machinery, depending on the randomly chosen mode
        response = requests.get(
            api_url_for(rotkehlchen_api_server, resource_name),
            json={'async_query': use_async},
        )
        if not use_async:
            return assert_proper_response_with_result(response)
        return wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 1.5,
        )

    vaults = fetch("makerdaovaultsresource")
    _check_vaults_values(vaults, ethereum_accounts[0])

    details = fetch("makerdaovaultdetailsresource")
    _check_vault_details_values(
        details=details,
        total_interest_owed_list=[FVal('0.2810015984764')],
    )
def query_api_create_and_get_report(
        server,
        start_ts: Timestamp,
        end_ts: Timestamp,
        prepare_mocks: bool,
        events_offset: Optional[int] = None,
        events_limit: Optional[int] = None,
        events_ascending_timestamp: bool = False,
):
    """Run history processing over [start_ts, end_ts] and fetch the result.

    Returns a tuple of (report_id, report_result, events_result), where the
    events may be paginated/sorted via the offset/limit/ascending arguments.
    """
    use_async = random.choice([False, True])
    rotki = server.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
        history_start_ts=start_ts,
        history_end_ts=end_ts,
    ) if prepare_mocks else None

    # Query history processing to start the history processing
    with ExitStack() as stack:
        if setup is not None:
            for manager in setup:
                stack.enter_context(manager)
        response = requests.get(
            api_url_for(server, 'historyprocessingresource'),
            json={
                'from_timestamp': start_ts,
                'to_timestamp': end_ts,
                'async_query': use_async,
            },
        )
        if not use_async:
            report_id = assert_proper_response_with_result(response)
        else:
            report_id = wait_for_async_task_with_result(
                server,
                assert_ok_async_response(response),
            )

        # Fetch the created report itself
        response = requests.get(
            api_url_for(server, 'per_report_resource', report_id=report_id),
        )
        report_result = assert_proper_response_with_result(response)

        # And its events, honoring the pagination arguments
        response = requests.post(
            api_url_for(server, 'per_report_data_resource', report_id=report_id),
            json={
                'offset': events_offset,
                'limit': events_limit,
                'ascending': events_ascending_timestamp,
            },
        )
        events_result = assert_proper_response_with_result(response)

    return report_id, report_result, events_result
def test_exchange_query_balances(rotkehlchen_api_server_with_exchanges):
    """Test that using the exchange balances query endpoint works fine"""
    use_async = random.choice([False, True])
    server = rotkehlchen_api_server_with_exchanges
    exchanges = server.rest_api.rotkehlchen.exchange_manager.connected_exchanges
    binance_patch = patch_binance_balances_query(exchanges['binance'])

    # First: query the balances of one specific exchange
    with binance_patch:
        response = requests.get(api_url_for(
            server,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': use_async})
        if not use_async:
            outcome = assert_proper_response_with_result(response)
        else:
            outcome = wait_for_async_task_with_result(
                server,
                assert_ok_async_response(response),
            )
    assert_binance_balances_result(outcome)

    # Second: query the balances of all setup exchanges at once
    poloniex_patch = patch_poloniex_balances_query(exchanges['poloniex'])
    with binance_patch, poloniex_patch:
        response = requests.get(
            api_url_for(server, "exchangebalancesresource"),
            json={'async_query': use_async},
        )
        if not use_async:
            result = assert_proper_response_with_result(response)
        else:
            result = wait_for_async_task_with_result(
                server,
                assert_ok_async_response(response),
            )
    assert_binance_balances_result(result['binance'])
    assert_poloniex_balances_result(result['poloniex'])
def test_query_avax_balances(rotkehlchen_api_server):
    """Test query the AVAX balances when multiple accounts are set up works as
    expected.
    """
    use_async = random.choice([False, True])
    setup = setup_balances(
        rotki=rotkehlchen_api_server.rest_api.rotkehlchen,
        ethereum_accounts=None,
        btc_accounts=None,
        eth_balances=None,
        token_balances=None,
        btc_balances=None,
    )
    with ExitStack() as stack:
        setup.enter_blockchain_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'named_blockchain_balances_resource',
                blockchain=SupportedBlockchain.AVALANCHE.value,
            ),
            json={'async_query': use_async},
        )
        if not use_async:
            result = assert_proper_response_with_result(response)
        else:
            result = wait_for_async_task_with_result(
                rotkehlchen_api_server,
                assert_ok_async_response(response),
            )

    def check_balance_entry(entry):
        # Each entry must carry liabilities plus a non-negative AVAX balance
        assert 'liabilities' in entry
        avax = entry['assets']['AVAX']
        assert FVal(avax['amount']) >= ZERO
        assert FVal(avax['usd_value']) >= ZERO

    # Check per account
    per_account = result['per_account']['AVAX']
    check_balance_entry(per_account[AVALANCHE_ACC1_AVAX_ADDR])
    check_balance_entry(per_account[AVALANCHE_ACC2_AVAX_ADDR])

    # Check totals
    assert 'liabilities' in result['totals']
    total_avax = result['totals']['assets']['AVAX']
    assert FVal(total_avax['amount']) >= ZERO
    assert FVal(total_avax['usd_value']) >= ZERO
def test_account_without_info(rotkehlchen_api_server, inquirer):  # pylint: disable=unused-argument
    """Test that an address without a liquity trove is absent from the result"""
    use_async = random.choice([False, True])
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'liquitytrovesresource',
    ), json={'async_query': use_async})
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert ADDR_WITHOUT_TROVE not in result
def test_trove_staking(rotkehlchen_api_server, inquirer):  # pylint: disable=unused-argument
    """Test that we can get the status of the staked lqty"""
    use_async = random.choice([False, True])
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'liquitystakingresource',
    ), json={'async_query': use_async})
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert LQTY_STAKING in result
    stake_data = result[LQTY_STAKING]
    # The staked amount must be present and strictly positive
    assert 'amount' in stake_data
    assert float(stake_data['amount']) > 0
def test_get_historical_assets_price(rotkehlchen_api_server):
    """Test given a list of asset-timestamp tuples it returns the asset
    price at the given timestamp.
    """
    use_async = random.choice([False, True])
    payload = {
        'assets_timestamp': [
            ['BTC', 1579543935],
            ['BTC', 1611166335],
            ['USD', 1579543935],
            ['GBP', 1548007935],
            ['GBP', 1611166335],
            ['XRP', 1611166335],
        ],
        'target_asset': 'USD',
        'async_query': use_async,
    }
    response = requests.post(
        api_url_for(rotkehlchen_api_server, "historicalassetspriceresource"),
        json=payload,
    )
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )

    # Result must carry exactly the 'assets' and 'target_asset' keys
    assert len(result) == 2
    expected_prices = {
        'BTC': {'1579543935': '30000', '1611166335': '35000'},
        'USD': {'1579543935': '1'},
        'GBP': {'1548007935': '1.25', '1611166335': '1.27'},
        'XRP': {'1611166335': '0'},
    }
    for asset_id, prices in expected_prices.items():
        assert result['assets'][asset_id] == prices
    assert result['target_asset'] == 'USD'
def test_account_with_proxy(rotkehlchen_api_server, inquirer):  # pylint: disable=unused-argument
    """Test that we can get the status of a trove created using DSProxy"""
    use_async = random.choice([False, True])
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'liquitytrovesresource',
    ), json={'async_query': use_async})
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert LQTY_PROXY in result
    assert ADDR_WITHOUT_TROVE not in result
    assert LQTY_ADDR in result
    # test that the list of addresses was not mutated
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    assert len(rotki.chain_manager.accounts.eth) == 3
def test_trove_position(rotkehlchen_api_server, inquirer):  # pylint: disable=unused-argument
    """Test that we can get the status of the user's troves"""
    use_async = random.choice([False, True])
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'liquitytrovesresource',
    ), json={'async_query': use_async})
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert LQTY_ADDR in result
    trove_data = result[LQTY_ADDR]
    # The trove status payload must expose all of the core fields
    for field in ('collateral', 'debt', 'collateralization_ratio', 'liquidation_price'):
        assert field in trove_data
    assert trove_data['active'] is True
def test_query_ksm_balances(rotkehlchen_api_server):
    """Test query the KSM balances when multiple accounts are set up works as
    expected.
    """
    use_async = random.choice([False, True])
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "named_blockchain_balances_resource",
            blockchain=SupportedBlockchain.KUSAMA.value,
        ),
        json={'async_query': use_async},
    )
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )

    def check_balance_entry(entry):
        # Each entry must carry liabilities plus a non-negative KSM balance
        assert 'liabilities' in entry
        ksm = entry['assets']['KSM']
        assert FVal(ksm['amount']) >= ZERO
        assert FVal(ksm['usd_value']) >= ZERO

    # Check per account
    per_account = result['per_account']['KSM']
    check_balance_entry(per_account[KSM_ADDRESS_1])
    check_balance_entry(per_account[KSM_ADDRESS_2])

    # Check totals
    assert 'liabilities' in result['totals']
    total_ksm = result['totals']['assets']['KSM']
    assert FVal(total_ksm['amount']) >= ZERO
    assert FVal(total_ksm['usd_value']) >= ZERO
def test_pickle_dill(
        rotkehlchen_api_server,
        inquirer,  # pylint: disable=unused-argument
):
    """Test that the pickle DILL endpoint reports the expected fields"""
    use_async = random.choice([False, True])
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'pickledillresource',
    ), json={'async_query': use_async})
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert PICKLE_ADDR in result
    data = result[PICKLE_ADDR]
    # All three DILL fields must be present for the address
    for field in ('locked_amount', 'locked_until', 'pending_rewards'):
        assert field in data
def test_staking_events(rotkehlchen_api_server):
    """Test that LQTY staking history events are correctly queried"""
    use_async = random.choice([True, False])
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'liquitystakinghistoryresource',
        ),
        json={
            'async_query': use_async,
            'from_timestamp': 0,
            'to_timestamp': 1628026696,
            'reset_db_data': False,
        },
    )
    if not use_async:
        result = assert_proper_response_with_result(response)
    else:
        result = wait_for_async_task_with_result(
            rotkehlchen_api_server,
            assert_ok_async_response(response),
        )
    assert LQTY_ADDR in result
    assert len(result[LQTY_ADDR]) == 1
    event = result[LQTY_ADDR][0]
    # Pin the single known staking event down to every field
    assert event['tx'] == '0xe527749c76a3af56d86c97a8f8f8ce07e191721e9e16a0f62a228f8a8ef6d295'
    assert event['timestamp'] == 1627827057
    assert event['kind'] == 'stake'
    assert event['stake_after']['amount'] == event['stake_change']['amount']
    assert event['stake_after']['asset'] == '_ceth_0x6DEA81C8171D0bA574754EF6F8b412F2Ed88c54D'
    assert event['stake_after']['amount'] == '177.02'
    assert event['stake_operation'] == 'stake created'
    assert event['sequence_number'] == '51676'
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
        separate_blockchain_calls,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.

    Try running both all blockchain balances in one call and each blockchain
    call separately.

    Fix over previous version: the binance call-count assertion message said
    "2 calls" while the assertion checked for 3; the message now matches.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)

    # Wrap (not replace) the underlying query functions so real behavior is
    # kept while call counts can be asserted
    multieth_balance_patch = patch.object(
        rotki.chain_manager.ethereum,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethereum.get_multieth_balance,
    )
    btc_balances_patch = patch(
        'rotkehlchen.chain.manager.get_bitcoin_addresses_balances',
        wraps=get_bitcoin_addresses_balances,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    binance_querydict_patch = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        eth = stack.enter_context(multieth_balance_patch)
        btc = stack.enter_context(btc_balances_patch)
        bn = stack.enter_context(binance_querydict_patch)
        # Fire all queries asynchronously before waiting on any, so they
        # are in flight concurrently
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "allbalancesresource"),
            json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        if separate_blockchain_calls:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'ETH'})
            task_id_blockchain_eth = assert_ok_async_response(response)
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True, 'blockchain': 'BTC'})
            task_id_blockchain_btc = assert_ok_async_response(response)
        else:
            response = requests.get(api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "blockchainbalancesresource",
            ), json={'async_query': True})
            task_id_blockchain = assert_ok_async_response(response)

        outcome_all = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_all,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
        )
        if separate_blockchain_calls:
            outcome_eth = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_eth,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
            outcome_btc = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain_btc,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )
        else:
            outcome_blockchain = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id_blockchain,
                timeout=ASYNC_TASK_WAIT_TIMEOUT * 2,
            )

        # Despite multiple concurrent requests the underlying queries must
        # have run only once each (cache reuse)
        assert eth.call_count == 1, 'eth balance query should only fire once'
        assert btc.call_count == 1, 'btc balance query should only happen once'
        assert bn.call_count == 3, 'binance balance query should do 3 calls'

    assert_all_balances(
        result=outcome_all,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )
    assert_binance_balances_result(outcome_one_exchange['result'])
    if not separate_blockchain_calls:
        # A single combined blockchain query carries both chains' results
        outcome_eth = outcome_blockchain
        outcome_btc = outcome_blockchain

    assert_eth_balances_result(
        rotki=rotki,
        result=outcome_eth,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=not separate_blockchain_calls,
    )
    assert_btc_balances_result(
        result=outcome_btc,
        btc_accounts=btc_accounts,
        btc_balances=setup.btc_balances,
        also_eth=not separate_blockchain_calls,
    )
def test_query_all_balances(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
):
    """Test that using the query all balances endpoint works

    Test that balances from various sources are returned. Such as exchanges,
    blockchain and manually tracked balances"""
    async_query = random.choice([False, True])
    # Disable caching of query results
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    setup = setup_balances(
        rotki=rotki,
        ethereum_accounts=ethereum_accounts,
        btc_accounts=btc_accounts,
        manually_tracked_balances=[ManuallyTrackedBalance(
            asset=A_EUR,
            label='My EUR bank',
            amount=FVal('1550'),
            location=Location.BANKS,
            tags=None,
        )],
    )
    # Test that all balances request saves data on a fresh account
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'async_query': async_query},
        )
        if async_query:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id,
            )
        else:
            outcome = assert_proper_response_with_result(response)

    # No errors should have been produced by the full balance query
    errors = rotki.msg_aggregator.consume_errors()
    assert len(errors) == 0
    assert_all_balances(
        result=outcome,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )
    last_save_timestamp = rotki.data.db.get_last_balance_save_time()

    # now do the same but check to see if the balance save frequency delay works
    # and thus data will not be saved
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
        )
    assert_proper_response(response)
    new_save_timestamp = rotki.data.db.get_last_balance_save_time()
    assert last_save_timestamp == new_save_timestamp

    # wait for at least 1 second to make sure that new balances can be saved.
    # Can't save balances again if it's the same timestamp
    gevent.sleep(1)
    # now do the same but test that balances are saved since the balance save
    # frequency delay is overridden via `save_data` = True
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ),
            json={'save_data': True},
        )
    assert_proper_response(response)
    new_save_timestamp = rotki.data.db.get_last_balance_save_time()
    assert last_save_timestamp != new_save_timestamp
def test_query_eth2_balances(rotkehlchen_api_server, query_all_balances):
    """Add ETH2 validators (one by index, one by public key with 45%
    ownership), query their balances and check the ownership proportion is
    applied. Then add a third validator and re-query with ignore_cache to
    check the balance cache is invalidated on addition.

    NOTE(review): balance amounts come from live beaconchain data, so the
    assertions below only check lower bounds.
    """
    ownership_proportion = FVal(0.45)
    # A fresh account must start with no tracked validators
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert result == {'entries': [], 'entries_limit': -1, 'entries_found': 0}

    validators = [Eth2Validator(
        index=4235,
        public_key='0xadd548bb2e6962c255ec5420e40e6e506dfc936592c700d56718ada7dcc52e4295644ff8f94f4ef898aa8a5ad81a5b84',  # noqa: E501
        ownership_proportion=ONE,
    ), Eth2Validator(
        index=5235,
        public_key='0x827e0f30c3d34e3ee58957dd7956b0f194d64cc404fca4a7313dc1b25ac1f28dcaddf59d05fbda798fa5b894c91b84fb',  # noqa: E501
        ownership_proportion=ownership_proportion,
    )]
    # Add the first validator by index only
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': validators[0].index},
    )
    assert_simple_ok_response(response)
    # Add the second validator by public key with partial (45%) ownership
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'public_key': validators[1].public_key, 'ownership_percentage': '45'},
    )
    assert_simple_ok_response(response)
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
    )
    result = assert_proper_response_with_result(response)
    assert result == {'entries': [x.serialize() for x in validators], 'entries_limit': -1, 'entries_found': 2}  # noqa: E501

    async_query = random.choice([False, True])
    # Query either all blockchain balances or only the ETH2 chain,
    # depending on the fixture parameter
    if query_all_balances:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            'blockchainbalancesresource',
        ), json={'async_query': async_query})
    else:
        response = requests.get(api_url_for(
            rotkehlchen_api_server,
            'named_blockchain_balances_resource',
            blockchain='ETH2',
        ), json={'async_query': async_query})

    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(
            server=rotkehlchen_api_server,
            task_id=task_id,
            timeout=ASYNC_TASK_WAIT_TIMEOUT * 5,
        )
    else:
        outcome = assert_proper_response_with_result(response)

    assert len(outcome['per_account']) == 1  # only ETH2
    per_acc = outcome['per_account']['ETH2']
    assert len(per_acc) == 2
    # hope they don't get slashed ;(
    amount1 = FVal('34.547410412')
    amount2 = FVal('34.600348623') * ownership_proportion
    assert FVal(per_acc[validators[0].public_key]['assets']['ETH2']['amount']) >= amount1
    assert FVal(per_acc[validators[1].public_key]['assets']['ETH2']['amount']) >= amount2
    totals = outcome['totals']
    assert len(totals['assets']) == 1
    assert len(totals['liabilities']) == 0
    assert FVal(totals['assets']['ETH2']['amount']) >= amount1 + amount2

    # now add 1 more validator and query ETH2 balances again to see it's included
    # the reason for this is to see the cache is properly invalidated at addition
    v0_pubkey = '0x933ad9491b62059dd065b560d256d8957a8c402cc6e8d8ee7290ae11e8f7329267a8811c397529dac52ae1342ba58c95'  # noqa: E501
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'eth2validatorsresource',
        ),
        json={'validator_index': 0, 'public_key': v0_pubkey},
    )
    assert_simple_ok_response(response)
    response = requests.get(api_url_for(
        rotkehlchen_api_server,
        'named_blockchain_balances_resource',
        blockchain='ETH2',
    ), json={'async_query': False, 'ignore_cache': True})
    outcome = assert_proper_response_with_result(response)

    assert len(outcome['per_account']) == 1  # only ETH2
    per_acc = outcome['per_account']['ETH2']
    assert len(per_acc) == 3
    amount1 = FVal('34.596290288')
    amount2 = FVal('34.547410412')
    amount3 = FVal('34.600348623') * ownership_proportion
    assert FVal(per_acc[v0_pubkey]['assets']['ETH2']['amount']) >= amount1
    assert FVal(per_acc[validators[0].public_key]['assets']['ETH2']['amount']) >= amount2
    assert FVal(per_acc[validators[1].public_key]['assets']['ETH2']['amount']) >= amount3
    totals = outcome['totals']
    assert len(totals['assets']) == 1
    assert len(totals['liabilities']) == 0
    assert FVal(totals['assets']['ETH2']['amount']) >= amount1 + amount2 + amount3
def test_add_delete_xpub(rotkehlchen_api_server):
    """This test uses real world data (queries actual BTC balances)

    Adds two tags, an xpub with derived addresses and an xpub without any,
    checks duplicate addition fails, verifies the accounts endpoint output and
    finally deletes the first xpub and checks all its DB mappings are removed.

    Test data from here:
    https://github.com/LedgerHQ/bitcoin-keychain-svc/blob/744736af1819cdab0a46ea7faf834008aeade6b1/integration/p2pkh_keychain_test.go#L40-L95
    """
    # Disable caching of query results
    rotki = rotkehlchen_api_server.rest_api.rotkehlchen
    rotki.chain_manager.cache_ttl_secs = 0
    async_query = random.choice([False, True])

    tag1 = {
        'name': 'ledger',
        'description': 'My ledger accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag1,
    )
    # Fix: the first tag-creation response was previously never checked -- the
    # variable was silently overwritten by the second PUT below
    assert_proper_response(response)
    tag2 = {
        'name': 'public',
        'description': 'My public accounts',
        'background_color': 'ffffff',
        'foreground_color': '000000',
    }
    response = requests.put(
        api_url_for(
            rotkehlchen_api_server,
            'tagsresource',
        ), json=tag2,
    )
    assert_proper_response(response)

    xpub1 = 'xpub6DCi5iJ57ZPd5qPzvTm5hUt6X23TJdh9H4NjNsNbt7t7UuTMJfawQWsdWRFhfLwkiMkB1rQ4ZJWLB9YBnzR7kbs9N8b2PsKZgKUHQm1X4or'  # noqa: E501
    xpub1_label = 'ledger_test_xpub'
    xpub1_tags = ['ledger', 'public']
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    _check_xpub_addition_outcome(outcome, xpub1)

    # Make sure that adding existing xpub fails
    json_data = {
        'async_query': False,
        'xpub': xpub1,
        'label': xpub1_label,
        'tags': xpub1_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    assert_error_response(
        response=response,
        contained_in_msg=f'Xpub {xpub1} with derivation path None is already tracked',
        status_code=HTTPStatus.BAD_REQUEST,
    )

    # Add an xpub with no derived addresses
    xpub2 = 'xpub68V4ZQQ62mea7ZUKn2urQu47Bdn2Wr7SxrBxBDDwE3kjytj361YBGSKDT4WoBrE5htrSB8eAMe59NPnKrcAbiv2veN5GQUmfdjRddD1Hxrk'  # noqa: E501
    xpub2_label = None
    xpub2_tags = None
    json_data = {
        'async_query': async_query,
        'xpub': xpub2,
        'label': xpub2_label,
        'tags': xpub2_tags,
    }
    response = requests.put(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)
    # xpub2 derives no addresses, so the balance outcome still reflects xpub1
    _check_xpub_addition_outcome(outcome, xpub1)

    # Also make sure that blockchain account data endpoint returns everything correctly
    response = requests.get(
        api_url_for(
            rotkehlchen_api_server,
            "blockchainsaccountsresource",
            blockchain='BTC',
        ))
    outcome = assert_proper_response_with_result(response)
    assert len(outcome['standalone']) == 2
    for entry in outcome['standalone']:
        assert entry['address'] in (UNIT_BTC_ADDRESS1, UNIT_BTC_ADDRESS2)
        assert entry['tags'] is None
        assert entry['label'] is None
    assert len(outcome['xpubs']) == 2
    for entry in outcome['xpubs']:
        assert len(entry) == 5
        if entry['xpub'] == xpub1:
            for address_data in entry['addresses']:
                assert address_data['address'] in EXPECTED_XPUB_ADDESSES
                assert address_data['label'] is None
                assert address_data['tags'] == xpub1_tags
        else:
            assert entry['xpub'] == xpub2
            assert entry['addresses'] is None
            assert entry['label'] is None
            assert entry['tags'] is None

    # Now delete the xpub and make sure all derived addresses are gone
    json_data = {
        'async_query': async_query,
        'xpub': xpub1,
        'derivation_path': None,
    }
    response = requests.delete(api_url_for(
        rotkehlchen_api_server,
        "btcxpubresource",
    ), json=json_data)
    if async_query:
        task_id = assert_ok_async_response(response)
        outcome = wait_for_async_task_with_result(rotkehlchen_api_server, task_id, timeout=180)
    else:
        outcome = assert_proper_response_with_result(response)

    # Only the two standalone accounts should remain tracked
    btc = outcome['per_account']['BTC']
    assert len(btc['standalone']) == 2
    assert UNIT_BTC_ADDRESS1 in btc['standalone']
    assert UNIT_BTC_ADDRESS2 in btc['standalone']
    assert 'xpubs' not in btc
    assert outcome['totals']['BTC']['amount'] is not None
    assert outcome['totals']['BTC']['usd_value'] is not None

    # Also make sure all mappings are gone from the DB
    cursor = rotki.data.db.conn.cursor()
    result = cursor.execute('SELECT object_reference from tag_mappings;').fetchall()
    assert len(result) == 0, 'all tag mappings should have been deleted'
    result = cursor.execute('SELECT * from xpub_mappings WHERE xpub=?', (xpub1, )).fetchall()
    assert len(result) == 0, 'all xpub mappings should have been deleted'
def test_trove_events(rotkehlchen_api_server):
    """Check that Liquity trove history is queried correctly.

    Covers a plain account and an account operating through a DSProxy,
    exercising the sync and async query paths at random.
    """
    use_async = random.choice([True, False])

    def query_troves_history(from_ts, to_ts):
        # Issue the history query and unwrap the (possibly async) result
        resp = requests.get(
            api_url_for(
                rotkehlchen_api_server,
                'liquitytroveshistoryresource',
            ),
            json={
                'async_query': use_async,
                'from_timestamp': from_ts,
                'to_timestamp': to_ts,
                'reset_db_data': False,
            },
        )
        if use_async:
            return wait_for_async_task_with_result(
                rotkehlchen_api_server,
                assert_ok_async_response(resp),
            )
        return assert_proper_response_with_result(resp)

    result = query_troves_history(0, 1628026696)
    assert LQTY_ADDR in result
    assert len(result[LQTY_ADDR]) == 2
    action = result[LQTY_ADDR][0]
    assert action['tx'] == '0xc8ad6f6ec244a93e1d66e60d1eab2ff2cb9de1f3a1f45c7bb4e9d2f720254137'
    assert action['timestamp'] == 1627818194
    assert action['kind'] == 'trove'
    # For an Open Trove the delta equals the post-operation amount
    assert action['debt_delta']['amount'] == action['debt_after']['amount']
    assert action['debt_delta']['amount'] == '6029.001719188487125'
    assert action['trove_operation'] == 'Open Trove'
    assert action['collateral_after']['amount'] == action['collateral_delta']['amount']
    assert action['collateral_delta']['amount'] == '3.5'
    assert action['sequence_number'] == '74148'

    # Check for account with dsproxy
    result = query_troves_history(1641529258, 1641529258)
    assert len(result[LQTY_PROXY]) == 1
    action = result[LQTY_PROXY][0]
    assert action['tx'] == '0xef24b51a09151cce6728de1f9c3a0e69ca40db1dcc82f287a1743e41c90ce95b'
    assert action['timestamp'] == 1641529258
    assert action['kind'] == 'trove'
    assert action['debt_after']['amount'] == '0'
    assert action['debt_delta']['amount'] == '-27436.074977906493051'
    assert action['trove_operation'] == 'Liquidation In Normal Mode'
    assert action['collateral_after']['amount'] == '0'
    assert action['collateral_delta']['amount'] == '-9.420492116554037728'
    assert action['sequence_number'] == '105764'
def test_query_history(rotkehlchen_api_server_with_exchanges):
    """Test that the history processing REST API endpoint works.

    Similar to test_history.py
    """
    use_async = random.choice([False, True])
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = prepare_rotki_for_history_processing_test(
        rotki,
        should_mock_history_processing=False,
    )

    # Query history processing to start the history processing
    with ExitStack() as stack:
        for manager in setup:
            if manager is not None:
                stack.enter_context(manager)
        response = requests.get(
            api_url_for(rotkehlchen_api_server_with_exchanges, "historyprocessingresource"),
            json={'from_timestamp': 0, 'to_timestamp': 1601040361, 'async_query': use_async},
        )
        if use_async:
            task_id = assert_ok_async_response(response)
            outcome = wait_for_async_task_with_result(
                rotkehlchen_api_server_with_exchanges,
                task_id,
            )
        else:
            outcome = assert_proper_response_with_result(response)

    # Simply check that the results got returned here. The actual correctness of
    # accounting results is checked in other tests such as test_simple_accounting
    assert len(outcome) == 2
    overview = outcome['overview']
    overview_keys = (
        "loan_profit",
        "margin_positions_profit_loss",
        "settlement_losses",
        "ethereum_transaction_gas_costs",
        "asset_movement_fees",
        "general_trade_profit_loss",
        "taxable_trade_profit_loss",
        "total_taxable_profit_loss",
        "total_profit_loss",
        "defi_profit_loss",
    )
    assert len(overview) == 10
    for key in overview_keys:
        assert overview[key] is not None
    all_events = outcome['all_events']
    assert isinstance(all_events, list)
    # TODO: These events are not actually checked anywhere for correctness
    # A test should probably be made for their correctness, even though
    # they are assumed correct if the overview is correct
    assert len(all_events) == 36

    # And now make sure that warnings have also been generated for the query of
    # the unsupported/unknown assets
    warnings = rotki.msg_aggregator.consume_warnings()
    expected_warning_fragments = (
        'poloniex trade with unknown asset NOEXISTINGASSET',
        'poloniex trade with unsupported asset BALLS',
        'withdrawal of unknown poloniex asset IDONTEXIST',
        'withdrawal of unsupported poloniex asset DIS',
        'deposit of unknown poloniex asset IDONTEXIST',
        'deposit of unsupported poloniex asset EBT',
        'poloniex loan with unsupported asset BDC',
        'poloniex loan with unknown asset NOTEXISTINGASSET',
        'bittrex trade with unsupported asset PTON',
        'bittrex trade with unknown asset IDONTEXIST',
        'kraken trade with unknown asset IDONTEXISTTOO',
        'unknown kraken asset IDONTEXIST. Ignoring its deposit/withdrawals',
        'unknown kraken asset IDONTEXISTEITHER. Ignoring its deposit/withdrawals query',
    )
    assert len(warnings) == 13
    for fragment, warning in zip(expected_warning_fragments, warnings):
        assert fragment in warning

    errors = rotki.msg_aggregator.consume_errors()
    expected_error_fragments = (
        'bittrex trade with unprocessable pair %$#%$#%#$%',
        'kraken trade with unprocessable pair IDONTEXISTZEUR',
        'kraken trade with unprocessable pair %$#%$#%$#%$#%$#%',
    )
    assert len(errors) == 3
    for fragment, error in zip(expected_error_fragments, errors):
        assert fragment in error
def test_multiple_balance_queries_not_concurrent(
        rotkehlchen_api_server_with_exchanges,
        ethereum_accounts,
        btc_accounts,
):
    """Test multiple different balance query requests happening concurrently

    This tests that if multiple balance query requests happen concurrently we
    do not end up doing them multiple times, but reuse the results thanks to cache.
    """
    rotki = rotkehlchen_api_server_with_exchanges.rest_api.rotkehlchen
    setup = setup_balances(rotki, ethereum_accounts, btc_accounts)
    # Wrap (not replace) the underlying query functions so real behavior is
    # kept while call counts can be inspected afterwards
    multieth_balance_patch = patch.object(
        rotki.chain_manager.ethereum,
        'get_multieth_balance',
        wraps=rotki.chain_manager.ethereum.get_multieth_balance,
    )
    binance = rotki.exchange_manager.connected_exchanges['binance']
    binance_querydict_patch = patch.object(binance, 'api_query_dict', wraps=binance.api_query_dict)

    # Test all balances request by requesting to not save the data
    with ExitStack() as stack:
        setup.enter_all_patches(stack)
        eth = stack.enter_context(multieth_balance_patch)
        bn = stack.enter_context(binance_querydict_patch)
        # Fire off all three queries asynchronously before collecting any
        # result, so they are in flight concurrently
        response = requests.get(
            api_url_for(
                rotkehlchen_api_server_with_exchanges,
                "allbalancesresource",
            ), json={'async_query': True},
        )
        task_id_all = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "named_exchanges_balances_resource",
            name='binance',
        ), json={'async_query': True})
        task_id_one_exchange = assert_ok_async_response(response)
        response = requests.get(api_url_for(
            rotkehlchen_api_server_with_exchanges,
            "blockchainbalancesresource",
        ), json={'async_query': True})
        task_id_blockchain = assert_ok_async_response(response)

        outcome_all = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_all,
        )
        # NOTE: plain wait_for_async_task here, so the raw response envelope
        # is kept and the payload is read via ['result'] below
        outcome_one_exchange = wait_for_async_task(
            rotkehlchen_api_server_with_exchanges,
            task_id_one_exchange,
        )
        outcome_blockchain = wait_for_async_task_with_result(
            rotkehlchen_api_server_with_exchanges,
            task_id_blockchain,
        )
        # Despite three concurrent requests each underlying query ran only once
        assert eth.call_count == 1, 'blockchain balance call should not happen concurrently'
        assert bn.call_count == 1, 'binance balance call should not happen concurrently'

    assert_all_balances(
        result=outcome_all,
        db=rotki.data.db,
        expected_data_in_db=True,
        setup=setup,
    )
    assert_binance_balances_result(outcome_one_exchange['result'])
    assert_eth_balances_result(
        rotki=rotki,
        result=outcome_blockchain,
        eth_accounts=ethereum_accounts,
        eth_balances=setup.eth_balances,
        token_balances=setup.token_balances,
        also_btc=True,
    )